Snap for 7550930 from dc4c38f9f2586ec3dc0ccd611caa589b6452603e to mainline-resolv-release

Change-Id: I64d4abe5d4d4c725d9a83e18d2228d109ff67bd3
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..66e486e
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,31 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["tools_treble_license"],
+}
+
+// Added automatically by a large-scale-change
+// See: http://go/android-license-faq
+license {
+    name: "tools_treble_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+    ],
+    license_text: [
+        "LICENSE",
+    ],
+}
diff --git a/build/Android.bp b/build/Android.bp
index 85ca3e9..7d1731e 100644
--- a/build/Android.bp
+++ b/build/Android.bp
@@ -12,9 +12,18 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "tools_treble_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["tools_treble_license"],
+}
+
 python_defaults {
     name: "treble_build_default",
-    pkg_path: "treble",
+    pkg_path: "treble/build",
     version: {
         py2: {
             enabled: false,
@@ -25,18 +34,36 @@
     },
 }
 
+python_library_host {
+  name: "treble_build_lib",
+  defaults: ["treble_build_default"],
+  srcs: [
+      "sandbox/build_android_sandboxed.py",
+      "sandbox/config.py",
+      "sandbox/nsjail.py",
+      "sandbox/overlay.py",
+      "sandbox/rbe.py",
+  ],
+}
+
 python_test_host {
     name: "treble_build_test",
     main: "test.py",
     defaults: ["treble_build_default"],
     srcs: [
-        "sandbox/nsjail.py",
+        "sandbox/build_android_sandboxed_test.py",
+        "sandbox/config_test.py",
         "sandbox/nsjail_test.py",
-        "sandbox/overlay.py",
         "sandbox/overlay_test.py",
+        "sandbox/rbe_test.py",
         "test.py",
         "sample_test.py",
     ],
+    libs: [
+        "treble_build_lib",
+    ],
     test_config: "test.xml",
-    test_suites: ["general-tests"],
+    test_options: {
+        unit_test: true,
+    },
 }
diff --git a/build/TEST_MAPPING b/build/TEST_MAPPING
deleted file mode 100644
index 9d49b72..0000000
--- a/build/TEST_MAPPING
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "presubmit" : [
-    {
-      "name" : "treble_build_test",
-      "host" : true
-    }
-  ]
-}
diff --git a/build/sandbox/build_android_sandboxed.py b/build/sandbox/build_android_sandboxed.py
new file mode 100644
index 0000000..f6a1b57
--- /dev/null
+++ b/build/sandbox/build_android_sandboxed.py
@@ -0,0 +1,212 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Builds an Android target in a secure sandbox."""
+
+import argparse
+import os
+from . import config
+from . import nsjail
+from . import rbe
+
+_DEFAULT_COMMAND_WRAPPER = \
+  '/src/tools/treble/build/sandbox/build_android_target.sh'
+
+
+def build(build_target, variant, nsjail_bin, chroot, dist_dir, build_id,
+          max_cpus, build_goals, config_file=None,
+          command_wrapper=_DEFAULT_COMMAND_WRAPPER, use_rbe=False,
+          readonly_bind_mount=None, env=[]):
+  """Builds an Android target in a secure sandbox.
+
+  Args:
+    build_target: A string with the name of the build target.
+    variant: A string with the build variant.
+    nsjail_bin: A string with the path to the nsjail binary.
+    chroot: A string with the path to the chroot of the NsJail sandbox.
+    dist_dir: A string with the path to the Android dist directory.
+    build_id: A string with the Android build identifier.
+    max_cpus: An integer with maximum number of CPUs.
+    build_goals: A list of strings with the goals and options to provide to the
+      build command.
+    config_file: A string path to an overlay configuration file.
+    command_wrapper: A string path to the command wrapper.
+    use_rbe: If true, will attempt to use RBE for the build.
+    readonly_bind_mount: A string path to a path to be mounted as read-only.
+    env: An array of environment variables to define in the NsJail sandbox in the
+      `var=val` syntax.
+
+  Returns:
+    A list of commands that were executed. Each command is a list of strings.
+  """
+  if config_file:
+    cfg = config.Config(config_file)
+    android_target = cfg.get_build_config_android_target(build_target)
+    if cfg.has_tag(build_target, 'skip'):
+      print('Warning: skipping build_target "{}" due to tag being set'.format(build_target))
+      return []
+  else:
+    android_target = build_target
+
+  # All builds are required to run with the root of the
+  # Android source tree as the current directory.
+  source_dir = os.getcwd()
+  command = [
+      command_wrapper,
+      '%s-%s' % (android_target, variant),
+      '/src',
+      'make',
+      '-j',
+  ] + build_goals
+
+  readonly_bind_mounts = []
+  if readonly_bind_mount:
+    readonly_bind_mounts = [readonly_bind_mount]
+
+  extra_nsjail_args = []
+  cleanup = lambda: None
+  nsjail_wrapper = []
+  if use_rbe:
+    cleanup = rbe.setup(env)
+    env = rbe.prepare_env(env)
+    extra_nsjail_args.extend(rbe.get_extra_nsjail_args())
+    readonly_bind_mounts.extend(rbe.get_readonlybind_mounts())
+    nsjail_wrapper = rbe.get_nsjail_bin_wrapper()
+
+  ret = nsjail.run(
+      nsjail_bin=nsjail_bin,
+      chroot=chroot,
+      overlay_config=config_file,
+      source_dir=source_dir,
+      command=command,
+      build_target=build_target,
+      dist_dir=dist_dir,
+      build_id=build_id,
+      max_cpus=max_cpus,
+      extra_nsjail_args=extra_nsjail_args,
+      readonly_bind_mounts=readonly_bind_mounts,
+      env=env,
+      nsjail_wrapper=nsjail_wrapper)
+
+  cleanup()
+
+  return ret
+
+
+def arg_parser():
+  """Returns an ArgumentParser for sandboxed android builds."""
+  # Use the top level module docstring for the help description
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      '--build_target',
+      help='The build target.')
+  parser.add_argument(
+      '--variant', default='userdebug', help='The Android build variant.')
+  parser.add_argument(
+      '--nsjail_bin',
+      required=True,
+      help='Path to NsJail binary.')
+  parser.add_argument(
+      '--chroot',
+      required=True,
+      help='Path to the chroot to be used for building the Android '
+      'platform. This will be mounted as the root filesystem in the '
+      'NsJail sandbox.')
+  parser.add_argument(
+      '--config_file',
+      required=True,
+      help='Path to the overlay configuration file.')
+  parser.add_argument(
+      '--command_wrapper',
+      default=_DEFAULT_COMMAND_WRAPPER,
+      help='Path to the command wrapper. '
+        'Defaults to \'%s\'.' % _DEFAULT_COMMAND_WRAPPER)
+  parser.add_argument(
+      '--readonly_bind_mount',
+      help='Path to the a path to be mounted as readonly inside the secure '
+      'build sandbox.')
+  parser.add_argument(
+      '--env', '-e',
+      type=str,
+      default=[],
+      action='append',
+      help='Specify an environment variable to the NSJail sandbox. Can be specified '
+      'muliple times. Syntax: var_name=value')
+  parser.add_argument(
+      '--dist_dir',
+      help='Path to the Android dist directory. This is where '
+      'Android platform release artifacts will be written.')
+  parser.add_argument(
+      '--build_id',
+      help='Build identifier what will label the Android platform '
+      'release artifacts.')
+  parser.add_argument(
+      '--max_cpus',
+      type=int,
+      help='Limit of concurrent CPU cores that the NsJail sanbox '
+      'can use.')
+  parser.add_argument(
+      '--context',
+      action='append',
+      default=[],
+      help='One or more contexts used to select build goals from the '
+      'configuration.')
+  parser.add_argument(
+      '--use_rbe',
+      action='store_true',
+      help='Executes the build on RBE')
+  return parser
+
+
+def parse_args(parser):
+  """Parses command line arguments.
+
+  Returns:
+    A dict of all the arguments parsed.
+  """
+  # Convert the Namespace object to a dict
+  return vars(parser.parse_args())
+
+
+def main():
+  args = parse_args(arg_parser())
+
+  # The --build_target argument could not be required
+  # using the standard 'required' argparse option because
+  # the argparser is reused by merge_android_sandboxed.py which
+  # does not require --build_target.
+  if args['build_target'] is None:
+    raise ValueError('--build_target is required.')
+
+  cfg = config.Config(args['config_file'])
+  build_goals = cfg.get_build_goals(args['build_target'], set(args['context']))
+
+  build(
+      build_target=args['build_target'],
+      variant=args['variant'],
+      nsjail_bin=args['nsjail_bin'],
+      chroot=args['chroot'],
+      config_file=args['config_file'],
+      command_wrapper=args['command_wrapper'],
+      readonly_bind_mount=args['readonly_bind_mount'],
+      env=args['env'],
+      dist_dir=args['dist_dir'],
+      build_id=args['build_id'],
+      max_cpus=args['max_cpus'],
+      use_rbe=args['use_rbe'],
+      build_goals=build_goals)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/sandbox/build_android_sandboxed_test.py b/build/sandbox/build_android_sandboxed_test.py
new file mode 100644
index 0000000..d4848c4
--- /dev/null
+++ b/build/sandbox/build_android_sandboxed_test.py
@@ -0,0 +1,216 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test build_android_sandboxed."""
+
+import os
+import tempfile
+import unittest
+from . import build_android_sandboxed
+
+
+class BuildAndroidSandboxedTest(unittest.TestCase):
+
+  def testBasic(self):
+    build_android_sandboxed.nsjail.__file__ = '/'
+    os.chdir('/')
+    commands = build_android_sandboxed.build(
+        'target_name',
+        'userdebug',
+        nsjail_bin='/bin/true',
+        chroot='/chroot',
+        dist_dir='/dist_dir',
+        build_id='0',
+        max_cpus=1,
+        build_goals=['droid', 'dist'])
+
+    self.assertEqual(
+        commands,
+        [
+            '/bin/true',
+            '--env', 'USER=nobody',
+            '--config', '/nsjail.cfg',
+            '--env', 'BUILD_NUMBER=0',
+            '--max_cpus=1',
+            '--env', 'DIST_DIR=/dist',
+            '--bindmount', '/:/src',
+            '--bindmount', '/dist_dir:/dist',
+            '--',
+            '/src/tools/treble/build/sandbox/build_android_target.sh',
+            'target_name-userdebug',
+            '/src',
+            'make', '-j', 'droid', 'dist',
+        ]
+    )
+
+  def testBuildCommand(self):
+    build_android_sandboxed.nsjail.__file__ = '/'
+    os.chdir('/')
+    commands = build_android_sandboxed.build(
+        'target_name',
+        'userdebug',
+        nsjail_bin='/bin/true',
+        command_wrapper='/command/wrapper',
+        chroot='/chroot',
+        dist_dir='/dist_dir',
+        build_id='0',
+        max_cpus=1,
+        build_goals=['droid', 'dist'])
+
+    self.assertEqual(
+        commands,
+        [
+            '/bin/true',
+            '--env', 'USER=nobody',
+            '--config', '/nsjail.cfg',
+            '--env', 'BUILD_NUMBER=0',
+            '--max_cpus=1',
+            '--env', 'DIST_DIR=/dist',
+            '--bindmount', '/:/src',
+            '--bindmount', '/dist_dir:/dist',
+            '--',
+            '/command/wrapper',
+            'target_name-userdebug',
+            '/src',
+            'make', '-j', 'droid', 'dist',
+        ]
+    )
+
+  def testUser(self):
+    build_android_sandboxed.nsjail.__file__ = '/'
+    os.chdir('/')
+    commands = build_android_sandboxed.build(
+        'target_name',
+        'user',
+        nsjail_bin='/bin/true',
+        chroot='/chroot',
+        dist_dir='/dist_dir',
+        build_id='0',
+        max_cpus=1,
+        build_goals=['droid', 'dist'])
+
+    self.assertEqual(
+        commands,
+        [
+            '/bin/true',
+            '--env', 'USER=nobody',
+            '--config', '/nsjail.cfg',
+            '--env', 'BUILD_NUMBER=0',
+            '--max_cpus=1',
+            '--env', 'DIST_DIR=/dist',
+            '--bindmount', '/:/src',
+            '--bindmount', '/dist_dir:/dist',
+            '--',
+            '/src/tools/treble/build/sandbox/build_android_target.sh',
+            'target_name-user',
+            '/src',
+            'make', '-j', 'droid', 'dist',
+        ]
+    )
+
+  def testExtraBuildGoals(self):
+    build_android_sandboxed.nsjail.__file__ = '/'
+    os.chdir('/')
+    commands = build_android_sandboxed.build(
+        'target_name',
+        'userdebug',
+        nsjail_bin='/bin/true',
+        chroot='/chroot',
+        dist_dir='/dist_dir',
+        build_id='0',
+        max_cpus=1,
+        build_goals=['droid', 'dist', 'extra_build_target'])
+
+    self.assertEqual(
+        commands,
+        [
+            '/bin/true',
+            '--env', 'USER=nobody',
+            '--config', '/nsjail.cfg',
+            '--env', 'BUILD_NUMBER=0',
+            '--max_cpus=1',
+            '--env', 'DIST_DIR=/dist',
+            '--bindmount', '/:/src',
+            '--bindmount', '/dist_dir:/dist',
+            '--',
+            '/src/tools/treble/build/sandbox/build_android_target.sh',
+            'target_name-userdebug',
+            '/src',
+            'make', '-j', 'droid', 'dist',
+            'extra_build_target'
+        ]
+    )
+
+  def testSkipBuildTag(self):
+    TEST_CONFIG_XML = """<config>
+      <target name="target_skip" tags="skip">
+        <build_config>
+          <goal name="droid"/>
+        </build_config>
+      </target>
+    </config>
+    """
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(TEST_CONFIG_XML)
+      test_config.flush()
+      build_android_sandboxed.nsjail.__file__ = '/'
+      os.chdir('/')
+      skip_commands = build_android_sandboxed.build(
+        'target_skip',
+        'userdebug',
+        nsjail_bin='/bin/true',
+        chroot='/chroot',
+        dist_dir='/dist_dir',
+        build_id='0',
+        max_cpus=1,
+        build_goals=[],
+        config_file=test_config.name)
+      self.assertFalse(skip_commands)
+
+  def testEnv(self):
+    build_android_sandboxed.nsjail.__file__ = '/'
+    os.chdir('/')
+    commands = build_android_sandboxed.build(
+        'target_name',
+        'userdebug',
+        nsjail_bin='/bin/true',
+        chroot='/chroot',
+        dist_dir='/dist_dir',
+        build_id='0',
+        max_cpus=1,
+        build_goals=['droid', 'dist'],
+        env=['first_env_var=first_value', 'second_env_var=second_value'])
+
+    self.assertEqual(
+        commands,
+        [
+            '/bin/true',
+            '--env', 'USER=nobody',
+            '--config', '/nsjail.cfg',
+            '--env', 'BUILD_NUMBER=0',
+            '--max_cpus=1',
+            '--env', 'DIST_DIR=/dist',
+            '--bindmount', '/:/src',
+            '--bindmount', '/dist_dir:/dist',
+            '--env', 'first_env_var=first_value',
+            '--env', 'second_env_var=second_value',
+            '--',
+            '/src/tools/treble/build/sandbox/build_android_target.sh',
+            'target_name-userdebug',
+            '/src',
+            'make', '-j', 'droid', 'dist',
+        ]
+    )
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/sandbox/build_android_target.sh b/build/sandbox/build_android_target.sh
new file mode 100755
index 0000000..23e05ea
--- /dev/null
+++ b/build/sandbox/build_android_target.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+readonly ANDROID_TARGET=$1
+readonly BUILD_DIR=$2
+shift
+shift
+readonly BUILD_COMMAND="$@"
+
+if [[ -z "${ANDROID_TARGET}" ]]; then
+  echo "error: Android target not set"
+  exit 1
+fi
+
+if [[ -z "${BUILD_DIR}" ]]; then
+  echo "error: Build directory not set"
+  exit 1
+fi
+
+if [[ -z "${BUILD_COMMAND}" ]]; then
+  echo "error: Build command not set"
+  exit 1
+fi
+
+# If there is an error, exit right away instead of continuing. For example,
+# lunch could fail. If so, there is no point in continuing the build.
+
+set -e
+
+echo "build_android_target.sh: source build/envsetup.sh"
+source build/envsetup.sh
+echo "build_android_target.sh: lunch $ANDROID_TARGET"
+lunch "$ANDROID_TARGET"
+echo "build_android_target.sh: cd $BUILD_DIR"
+cd "$BUILD_DIR"
+
+# However, the build command itself cannot use set -e. I haven't figured this
+# out yet, but something in the build command causes early exit for some
+# targets.
+
+set +e
+
+echo "build_android_target.sh: $BUILD_COMMAND"
+$BUILD_COMMAND
+BUILD_COMMAND_EXIT_VALUE=$?
+
+# Collect RBE metrics if enabled
+if [[ -n "${USE_RBE}" && -n "${RBE_DIR}" ]]; then
+  echo "build_android_target.sh: $RBE_DIR/dumpstats"
+  $RBE_DIR/dumpstats
+fi
+
+exit $BUILD_COMMAND_EXIT_VALUE
diff --git a/build/sandbox/config.py b/build/sandbox/config.py
new file mode 100644
index 0000000..26bccbd
--- /dev/null
+++ b/build/sandbox/config.py
@@ -0,0 +1,513 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Parses config file and provides various ways of using it."""
+
+import xml.etree.ElementTree as ET
+import collections
+
+# The config file must be in XML with a structure as described below.
+#
+# The top level config element shall contain one or more "target" child
+# elements. Each of these may contain one or more build_config child elements.
+# The build_config child elements will inherit the properties of the target
+# parent.
+#
+# Each "target" and "build_config" may contain the following:
+#
+# Attributes:
+#
+#   name: The name of the target.
+#
+#   android_target: The name of the android target used with lunch
+#
+#   allow_readwrite_all: "true" if the full source folder shall be mounted as
+#   read/write. It should be accompanied by a comment with the bug describing
+#   why it was required.
+#
+#   tags: A comma-separated list of strings to be associated with the target
+#     and any of its nested build_targets. You can use a tag to associate
+#     information with a target in your configuration file, and retrieve that
+#     information using the get_tags API or the has_tag API.
+#
+# Child elements:
+#
+#   config: A generic name-value configuration element.
+#
+#     Attributes:
+#       name: Name of the configuration
+#       value: Value of the configuration
+#
+#   overlay: An overlay to be mounted while building the target.
+#
+#     Attributes:
+#
+#       name: The name of the overlay.
+#
+#     Child elements:
+#
+#       replacement_path:  An overlay path that supersedes any conflicts
+#         after it.
+#
+#         Properties:
+#
+#           name: The name of the replacement path. This path will
+#             supersede the same path for any subsequent conflicts. If two
+#             overlays have the same replacement path an error will occur.
+#
+#
+#   view: A map (optionally) specifying a filesystem view mapping for each
+#     target.
+#
+#     Attributes:
+#
+#       name: The name of the view.
+#
+#   allow_readwrite: A folder to mount read/write
+#   inside the Android build nsjail. Each allowed read-write entry should be
+#   accompanied by a bug that indicates why it was required and tracks the
+#   progress to a fix.
+#
+#     Attributes:
+#
+#       path: The path to be allowed read-write mounting.
+#
+#   build_config: A list of goals to be used while building the target.
+#
+#     Attributes:
+#
+#       name: The name of the build config. Defaults to the target name
+#         if not set.
+#
+#     Child elements:
+#
+#       goal: A build goal.
+#
+#         Properties:
+#
+#           name: The name of the build goal. The build tools pass the name
+#             attribute as a parameter to make. This can have a value like
+#             "droid" or "VAR=value".
+#
+#           contexts: A comma-separated list of the contexts in which this
+#             goal applies. If this attribute is missing or blank, the goal
+#             applies to all contexts. Otherwise, it applies only in the
+#             requested contexts (see get_build_goals).
+
+Overlay = collections.namedtuple('Overlay', ['name', 'replacement_paths'])
+
+class BuildConfig(object):
+  """Represents configuration of a build_target.
+
+  Attributes:
+    name: name of the build_target used to pull the configuration.
+    android_target: The name of the android target used with lunch.
+    tags: List of tags associated with the build target config
+    build_goals: List of goals to be used while building the target.
+    overlays: List of overlays to be mounted.
+    views: A list of (source, destination) string path tuple to be mounted.
+      See view nodes in XML.
+    allow_readwrite_all: If true, mount source tree as rw.
+    allow_readwrite: List of directories to be mounted as rw.
+    allowed_projects_file: a string path name of a file containing the
+      allowed projects.
+    configurations: a map of name to value configurations
+  """
+
+  def __init__(self,
+               name,
+               android_target,
+               tags=frozenset(),
+               build_goals=(),
+               overlays=(),
+               views=(),
+               allow_readwrite_all=False,
+               allow_readwrite=(),
+               allowed_projects_file=None,
+               configurations=None):
+    super().__init__()
+    self.name = name
+    self.android_target = android_target
+    self.tags = tags
+    self.build_goals = list(build_goals)
+    self.overlays = list(overlays)
+    self.views = list(views)
+    self.allow_readwrite_all = allow_readwrite_all
+    self.allow_readwrite = list(allow_readwrite)
+    self.allowed_projects_file = allowed_projects_file
+    self.configurations = configurations or {}
+
+  def validate(self):
+    """Run tests to validate build configuration"""
+    if not self.name:
+      raise ValueError('Error build_config must have a name.')
+    # Validate that a build config does not contain an overlay with
+    # conflicting replacement paths.
+    if len(self.overlays) > 1 and set.intersection(
+        *[o.replacement_paths for o in self.overlays]):
+      raise ValueError(
+          'Error build_config overlays have conflicting replacement_paths.')
+
+  @classmethod
+  def from_config(cls, config_elem, fs_view_map, base_config=None):
+    """Creates a BuildConfig from a config XML element and an optional
+      base_config.
+
+    Args:
+      config_elem: the config XML node element to build the configuration
+      fs_view_map: A map of view names to list of tuple(source, destination)
+        paths.
+      base_config: the base BuildConfig to use
+
+    Returns:
+      A build config generated from the config element and the base
+      configuration if provided.
+    """
+    if base_config is None:
+      # Build a base_config with required elements from the new config_elem
+      name = config_elem.get('name')
+      base_config = cls(
+          name=name, android_target=config_elem.get('android_target', name))
+
+    return cls(
+        android_target=config_elem.get('android_target',
+                                       base_config.android_target),
+        name=config_elem.get('name', base_config.name),
+        allowed_projects_file=config_elem.get(
+            'allowed_projects_file', base_config.allowed_projects_file),
+        build_goals=_get_build_config_goals(config_elem,
+                                            base_config.build_goals),
+        tags=_get_config_tags(config_elem, base_config.tags),
+        overlays=_get_overlays(config_elem, base_config.overlays),
+        allow_readwrite=_get_allow_readwrite(config_elem,
+                                             base_config.allow_readwrite),
+        views=_get_views(config_elem, fs_view_map, base_config.views),
+        allow_readwrite_all=_get_allowed_readwrite_all(
+            config_elem, base_config.allow_readwrite_all),
+        configurations=_get_configurations(config_elem,
+                                           base_config.configurations)
+    )
+
+
+def _get_configurations(config_elem, base):
+  configs = dict(base)
+  configs.update({
+      config.get('name'): config.get('value')
+      for config in config_elem.findall('config')
+  })
+  return configs
+
+
+def _get_build_config_goals(config_elem, base=None):
+  """Retrieves goals from build_config or target.
+
+  Args:
+    config_elem: A build_config or target xml element.
+    base: Initial list of goals to prepend to the list
+
+  Returns:
+    A list of tuples where the first element of the tuple is the build goal
+    name, and the second is a list of the contexts to which this goal applies.
+  """
+
+  return base + [(goal.get('name'), set(goal.get('contexts').split(','))
+                  if goal.get('contexts') else None)
+                 for goal in config_elem.findall('goal')]
+
+
+def _get_config_tags(config_elem, base=frozenset()):
+  """Retrieves tags from build_config or target.
+
+  Args:
+    config_elem: A build_config or target xml element.
+    base: Initial list of tags to seed the set
+
+  Returns:
+    A set of tags for a build_config.
+  """
+  tags = config_elem.get('tags')
+  return base.union(set(tags.split(',')) if tags else set())
+
+
+def _get_allowed_readwrite_all(config_elem, default=False):
+  """Determines if build_config or target is set to allow readwrite for all
+    source paths.
+
+  Args:
+    config_elem: A build_config or target xml element.
+    default: Value to use if element doesn't contain the
+      allow_readwrite_all attribute.
+
+  Returns:
+    True if build config is set to allow readwrite for all source paths
+  """
+  value = config_elem.get('allow_readwrite_all')
+  return value == 'true' if value else default
+
+
+def _get_overlays(config_elem, base=None):
+  """Retrieves list of overlays from build_config or target.
+
+  Args:
+    config_elem: A build_config or target xml element.
+    base: Initial list of overlays to prepend to the list
+
+  Returns:
+    A list of tuples of overlays and replacement paths to mount for a build_config or target.
+  """
+  overlays = []
+  for overlay in config_elem.findall('overlay'):
+    overlays.append(
+        Overlay(
+            name=overlay.get('name'),
+            replacement_paths=set([
+                path.get('path') for path in overlay.findall('replacement_path')
+            ])))
+  return base + overlays
+
+def _get_views(config_elem, fs_view_map, base=None):
+  """Retrieves list of views from build_config or target.
+
+  Args:
+    config_elem: A build_config or target xml element.
+    base: Initial list of views to prepend to the list
+
+  Returns:
+    A list of (source, destination) string path tuple to be mounted. See view
+      nodes in XML.
+  """
+  return base + [fs for o in config_elem.findall('view')
+                 for fs in fs_view_map[o.get('name')]]
+
+
+def _get_allow_readwrite(config_elem, base=None):
+  """Retrieves list of directories to be mounted rw from build_config or
+    target.
+
+  Args:
+    config_elem: A build_config or target xml element.
+    base: Initial list of rw directories to prepend to the list
+
+  Returns:
+    A list of directories to be mounted rw.
+  """
+  return (base +
+          [o.get('path') for o in config_elem.findall('allow_readwrite')])
+
+
+def _get_fs_view_map(config):
+  """Retrieves the map of filesystem views.
+
+  Args:
+    config: An XML Element that is the root of the config XML tree.
+
+  Returns:
+    A dict of filesystem views keyed by view name. A filesystem view is a
+    list of (source, destination) string path tuples.
+  """
+  # A valid config file is not required to include FS Views, only overlay
+  # targets.
+  return {
+      view.get('name'): [(path.get('source'), path.get('destination'))
+                         for path in view.findall('path')
+                        ] for view in config.findall('view')
+  }
+
+
+def _get_build_config_map(config):
+  """Retrieves a map of all build config.
+
+  Args:
+    config: An XML Element that is the root of the config XML tree.
+
+  Returns:
+    A dict of BuildConfig keyed by build_target.
+  """
+  fs_view_map = _get_fs_view_map(config)
+  build_config_map = {}
+  for target_config in config.findall('target'):
+    base_target = BuildConfig.from_config(target_config, fs_view_map)
+
+    for build_config in target_config.findall('build_config'):
+      build_target = BuildConfig.from_config(build_config, fs_view_map,
+                                             base_target)
+      build_target.validate()
+      build_config_map[build_target.name] = build_target
+
+  return build_config_map
+
+
+class Config:
+  """Presents an API to the static XML configuration."""
+
+  def __init__(self, config_filename):
+    """Initializes a Config instance from the specified filename
+
+    This method parses the XML content of the file named by config_filename
+    into internal data structures. You can then use various methods to query
+    the static config.
+
+    Args:
+      config_filename: The name of the file from which to load the config.
+    """
+
+    tree = ET.parse(config_filename)
+    config = tree.getroot()
+    self._build_config_map = _get_build_config_map(config)
+
+  def get_available_build_targets(self):
+    """Return a list of available build targets."""
+    return sorted(self._build_config_map.keys())
+
+  def get_tags(self, build_target):
+    """Given a build_target, return the (possibly empty) set of tags."""
+    return self._build_config_map[build_target].tags
+
+  def has_tag(self, build_target, tag):
+    """Return true if build_target has tag.
+
+    Args:
+      build_target: A string build_target to be queried.
+      tag: A string tag that this target may have.
+
+    Returns:
+      If the build_target has the tag, True. Otherwise, False.
+    """
+    return tag in self._build_config_map[build_target].tags
+
+  def get_allowed_projects_file(self, build_target):
+    """Given a build_target, return a string with the allowed projects file."""
+    return self._build_config_map[build_target].allowed_projects_file
+
+  def get_build_config_android_target(self, build_target):
+    """Given a build_target, return an android_target.
+
+    Generally a build_target maps directly to the android_target of the same
+    name, but they can differ. In a config.xml file, the name attribute of a
+    target element is the android_target (which is used for lunch). The name
+    attribute (if any) of a build_config element is the build_target. If a
+    build_config element does not have a name attribute, then the build_target
+    is the android_target.
+
+    Args:
+      build_target: A string build_target to be queried.
+
+    Returns:
+      A string android_target that can be used for lunch.
+    """
+    return self._build_config_map[build_target].android_target
+
+  def get_build_goals(self, build_target, contexts=frozenset()):
+    """Given a build_target and a context, return a list of build goals.
+
+    For a given build_target, we may build in a variety of contexts. For
+    example we might build in continuous integration, or we might build
+    locally, or other contexts defined by the configuration file and scripts
+    that use it. The contexts parameter is a set of strings that specify the
+    contexts for which this function should retrieve goals.
+
+    In the configuration file, each goal has a contexts attribute, which
+    specifies the contexts to which the goal applies. We treat a goal with no
+    contexts attribute as applying to all contexts.
+
+    Example:
+
+      <build_config>
+        <goal name="droid"/>
+        <goal name="dist" contexts="ota"/>
+      </build_config>
+
+      Here we have the goal "droid", which matches all contexts, and the goal
+      "dist", which matches the "ota" context. Invoking this method with the
+      set(['ota']) would return ['droid', 'dist'].
+
+    Args:
+      build_target: A string build_target to be queried.
+      contexts: A set of contexts for which to retrieve goals.
+
+    Returns:
+      A list of strings, where each string is a goal to be passed to make.
+    """
+
+    build_goals = []
+    for goal, build_contexts in self._build_config_map[
+        build_target].build_goals:
+      if not build_contexts:
+        build_goals.append(goal)
+      elif build_contexts.intersection(contexts):
+        build_goals.append(goal)
+
+    return build_goals
+
+  def get_rw_allowlist_map(self):
+    """Return read-write allowlist map.
+
+    Returns:
+      A dict of string lists keyed by target name. Each value in the dict is
+      a list of allowed read-write paths corresponding to the target.
+    """
+    return {b.name: b.allow_readwrite for b in self._build_config_map.values()}
+
+  def get_allow_readwrite_all(self, build_target):
+    """Return True if the target should mount all its source as read-write.
+
+    Args:
+      build_target: A string build_target to be queried.
+
+    Returns:
+      True if the target should mount all its source as read-write.
+    """
+    return self._build_config_map[build_target].allow_readwrite_all
+
+  def get_overlay_map(self):
+    """Return the overlay map.
+
+    Returns:
+      A dict keyed by target name. Each value in the dict is a list of
+      overlay names corresponding to the target.
+    """
+    return {
+        b.name : [o.name for o in b.overlays
+                 ] for b in self._build_config_map.values()
+    }
+
+
+  def get_fs_view_map(self):
+    """Return the filesystem view map.
+    Returns:
+      A dict of filesystem views keyed by target name. A filesystem view is a
+      list of (source, destination) string path tuples.
+    """
+    return {b.name : b.views for b in self._build_config_map.values()}
+
+
+  def get_build_config(self, build_target):
+    return self._build_config_map[build_target]
+
+
+def factory(config_filename):
+  """Create an instance of a Config class.
+
+  Args:
+    config_filename: The name of the file from which to load the config. This
+      can be None, which results in this function returning None.
+
+  Returns:
+    If config_filename is None, returns None. Otherwise, a new instance of a
+    Config class containing the configuration parsed from config_filename.
+  """
+  if config_filename is None:
+    return None
+
+  return Config(config_filename)
diff --git a/build/sandbox/config_test.py b/build/sandbox/config_test.py
new file mode 100644
index 0000000..002c625
--- /dev/null
+++ b/build/sandbox/config_test.py
@@ -0,0 +1,307 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test config."""
+
+import tempfile
+import unittest
+from . import config
+
+_TEST_CONFIG_XML = """<config>
+  <target name="android_target_1">
+    <build_config>
+      <goal name="droid"/>
+      <goal name="dist"/>
+    </build_config>
+  </target>
+  <target name="android_target_2" tags="cool,hot">
+    <config name="fmc_framework_images" value="image1,image2"/>
+    <config name="fmc_misc_info_keys" value="misc_info_keys.txt"/>
+    <goal name="common_goal"/>
+    <build_config tags="warm">
+      <goal name="droid"/>
+      <goal name="dist"/>
+      <goal name="goal_for_android_target_2"/>
+    </build_config>
+    <build_config name="build_target_2" tags="dry">
+      <config name="fmc_framework_images" value="bt1,bt2"/>
+      <config name="fmc_misc_info_keys" value="misc_info_keys_2.txt"/>
+      <goal name="droid"/>
+      <goal name="VAR=a"/>
+    </build_config>
+  </target>
+  <target name="android_target_3" tags="">
+    <build_config>
+      <goal name="droid"/>
+    </build_config>
+  </target>
+  <target name="some_target" android_target="android_target_4">
+    <goal name="droid"/>
+    <build_config>
+    </build_config>
+  </target>
+  <target name="no_goals_target">
+    <build_config/>
+  </target>
+</config>
+"""
+
+_TEST_CONTEXTS_CONFIG_XML = """<config>
+  <target name="test_target">
+    <build_config>
+
+      <!-- no contexts attribute: always selected -->
+      <goal name="droid"/>
+
+      <!-- empty contexts attribute: always selected -->
+      <goal name="always" contexts=""/>
+
+      <!-- selected if ci context requested -->
+      <goal name="dist" contexts="ci"/>
+
+      <!-- selected if x context requested -->
+      <goal name="VAR=value" contexts="x"/>
+
+      <!-- selected if ci or x context requested -->
+      <goal name="extra_goal" contexts="ci,x"/>
+
+    </build_config>
+  </target>
+</config>
+"""
+
+class ConfigTest(unittest.TestCase):
+  """unittest for Config."""
+
+  def testConfigFilenameNone(self):
+    cfg = config.factory(None)
+    self.assertIsNone(cfg)
+
+  def testAvailableBuildTargets(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(_TEST_CONFIG_XML)
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+      self.assertListEqual(
+          cfg.get_available_build_targets(),
+          # Sorted; not in document order.
+          [
+              'android_target_1',
+              'android_target_2',
+              'android_target_3',
+              'build_target_2',
+              'no_goals_target',
+              'some_target',
+          ])
+
+  def testBuildTargetTags(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(_TEST_CONFIG_XML)
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+
+      self.assertSetEqual(cfg.get_tags('android_target_1'), set())
+      self.assertSetEqual(
+          cfg.get_tags('android_target_2'), set(['cool', 'hot', 'warm']))
+      self.assertSetEqual(
+          cfg.get_tags('build_target_2'), set(['cool', 'hot', 'dry']))
+      self.assertSetEqual(cfg.get_tags('android_target_3'), set())
+
+      self.assertFalse(cfg.has_tag('android_target_1', 'cool'))
+      self.assertFalse(cfg.has_tag('android_target_1', 'hot'))
+      self.assertFalse(cfg.has_tag('android_target_1', 'meh'))
+
+      self.assertTrue(cfg.has_tag('android_target_2', 'cool'))
+      self.assertTrue(cfg.has_tag('android_target_2', 'hot'))
+      self.assertFalse(cfg.has_tag('android_target_2', 'meh'))
+
+      self.assertTrue(cfg.has_tag('build_target_2', 'cool'))
+      self.assertTrue(cfg.has_tag('build_target_2', 'hot'))
+      self.assertFalse(cfg.has_tag('build_target_2', 'meh'))
+
+      self.assertFalse(cfg.has_tag('android_target_3', 'cool'))
+      self.assertFalse(cfg.has_tag('android_target_3', 'hot'))
+      self.assertFalse(cfg.has_tag('android_target_3', 'meh'))
+
+  def testBuildTargetToAndroidTarget(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(_TEST_CONFIG_XML)
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+
+      # Test that build_target android_target_1 -> android_target_1.
+      self.assertEqual(
+          cfg.get_build_config_android_target('android_target_1'),
+          'android_target_1')
+
+      # Test that build_target android_target_2 -> android_target_2.
+      self.assertEqual(
+          cfg.get_build_config_android_target('android_target_2'),
+          'android_target_2')
+
+      # Test that build_target build_target_2 -> android_target_2.
+      self.assertEqual(
+          cfg.get_build_config_android_target('build_target_2'),
+          'android_target_2')
+
+      # Test overriding android_target property
+      self.assertEqual(
+          cfg.get_build_config_android_target('some_target'),
+          'android_target_4')
+
+  def testBuildTargetToBuildGoals(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(_TEST_CONFIG_XML)
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+
+      # Test that build_target android_target_1 has goals droid and dist.
+      self.assertEqual(
+          cfg.get_build_goals('android_target_1'),
+          ['droid', 'dist'])
+
+      # Test that build_target android_target_2 has goals droid, dist, and
+      # goal_for_android_target_2.
+      self.assertEqual(
+          cfg.get_build_goals('android_target_2'),
+          ['common_goal', 'droid', 'dist', 'goal_for_android_target_2'])
+
+      # Test that build_target build_target_2 has goals droid and VAR=a.
+      self.assertEqual(
+          cfg.get_build_goals('build_target_2'),
+          ['common_goal', 'droid', 'VAR=a'])
+
+      # Test empty goals
+      self.assertEqual(cfg.get_build_goals('no_goals_target'),[])
+
+  def testBuildTargetToBuildGoalsWithContexts(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(_TEST_CONTEXTS_CONFIG_XML)
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+
+      # Test that when contexts is the default (empty), we select only the
+      # "always" goals.
+
+      build_goals = cfg.get_build_goals('test_target')
+      self.assertEqual(build_goals, ['droid', 'always'])
+
+      # Test that when contexts is explicitly empty, we select only the
+      # "always" goals.
+
+      build_goals = cfg.get_build_goals('test_target', set())
+      self.assertEqual(build_goals, ['droid', 'always'])
+
+      # Similarly, test that when contexts doesn't match any goal_contexts,
+      # we select only the "always" goals.
+
+      build_goals = cfg.get_build_goals('test_target', set('no_matchy'))
+      self.assertEqual(build_goals, ['droid', 'always'])
+
+      # Test that when contexts is set(['x']), we select the "always" goals and
+      # the x goals.
+
+      build_goals = cfg.get_build_goals('test_target', set(['x']))
+
+      self.assertEqual(
+          build_goals,
+          ['droid', 'always', 'VAR=value', 'extra_goal'])
+
+      # Test that when requested_contexts is set(['ci', 'x']), we select the
+      # "always" goals, the ci goals, and the x goals.
+
+      build_goals = cfg.get_build_goals('test_target', set(['ci', 'x']))
+
+      self.assertEqual(
+          build_goals,
+          ['droid', 'always', 'dist', 'VAR=value', 'extra_goal'])
+
+  def testAllowReadWriteAll(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write("""<?xml version="1.0" encoding="UTF-8" ?>
+        <config>
+          <target name="target_allowed" allow_readwrite_all="true">
+            <build_config>
+              <goal name="droid"/>
+            </build_config>
+            <allow_readwrite_all/>
+          </target>
+          <target name="target_not_allowed">
+            <build_config>
+              <goal name="droid"/>
+            </build_config>
+          </target>
+          <target name="target_also_not_allowed" allow_readwrite_all="false">
+            <build_config>
+              <goal name="droid"/>
+            </build_config>
+          </target>
+        </config>""")
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+
+      self.assertTrue(cfg.get_allow_readwrite_all('target_allowed'))
+
+      self.assertFalse(cfg.get_allow_readwrite_all('target_not_allowed'))
+
+      self.assertFalse(cfg.get_allow_readwrite_all('target_also_not_allowed'))
+
+  def testAllowedProjectsFile(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(
+          '<?xml version="1.0" encoding="UTF-8" ?>'
+          '<config>'
+          '  <target name="target_name">'
+          '    <build_config allowed_projects_file="path/to/default/build/config/allowed_projects.xml">'
+          '      <goal name="build_goal"/>'
+          '    </build_config>'
+          '    <build_config name="has_allowed_projects_file" allowed_projects_file="path/to/named/build/config/allowed_projects.xml">'
+          '      <goal name="build_goal"/>'
+          '    </build_config>'
+          '    <build_config name="no_allowed_projects_file">'
+          '      <goal name="build_goal"/>'
+          '    </build_config>'
+          '  </target>'
+          '</config>')
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+
+      self.assertEqual(
+          cfg.get_allowed_projects_file('target_name'),
+          'path/to/default/build/config/allowed_projects.xml')
+      self.assertEqual(
+          cfg.get_allowed_projects_file('has_allowed_projects_file'),
+          'path/to/named/build/config/allowed_projects.xml')
+      self.assertIsNone(
+          cfg.get_allowed_projects_file('no_allowed_projects_file'))
+
+  def testMergeConfig(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(_TEST_CONFIG_XML)
+      test_config.flush()
+      cfg = config.factory(test_config.name)
+
+      bc_at2 = cfg.get_build_config('android_target_2')
+      self.assertDictEqual(bc_at2.configurations, {
+        'fmc_framework_images': 'image1,image2',
+        'fmc_misc_info_keys': 'misc_info_keys.txt'
+      })
+
+      bc_bt2 = cfg.get_build_config('build_target_2')
+      self.assertDictEqual(bc_bt2.configurations, {
+        'fmc_framework_images': 'bt1,bt2',
+        'fmc_misc_info_keys': 'misc_info_keys_2.txt'
+      })
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/sandbox/nsjail.cfg b/build/sandbox/nsjail.cfg
index 03ac8ca..509ecc6 100644
--- a/build/sandbox/nsjail.cfg
+++ b/build/sandbox/nsjail.cfg
@@ -74,6 +74,15 @@
   is_bind: false
 }
 
+# Some tools need /dev/shm to create a named semaphore. Use a new tmpfs to
+# limit access to the external environment.
+mount {
+  dst: "/dev/shm"
+  fstype: "tmpfs"
+  rw: true
+  is_bind: false
+}
+
 # Map the working User ID to a username
 # Some tools like Java need a valid username
 mount {
diff --git a/build/sandbox/nsjail.py b/build/sandbox/nsjail.py
index 1d2872e..c388d0b 100644
--- a/build/sandbox/nsjail.py
+++ b/build/sandbox/nsjail.py
@@ -25,6 +25,7 @@
 import os
 import re
 import subprocess
+from . import config
 from .overlay import BindMount
 from .overlay import BindOverlay
 
@@ -38,53 +39,19 @@
 
 _CHROOT_MOUNT_POINTS = [
   'bin', 'sbin',
-  'etc/alternatives', 'etc/default' 'etc/perl',
+  'etc/alternatives', 'etc/default', 'etc/perl',
   'etc/ssl', 'etc/xml',
   'lib', 'lib32', 'lib64', 'libx32',
   'usr',
 ]
 
-def load_rw_whitelist(rw_whitelist_config):
-  """Loads a read/write whitelist configuration file.
-
-  The read/write whitelist configuration file is a text file that contains a
-  list of source_dir relative paths which should be mounted read/write inside
-  the build sandbox. Empty lines and lines begnning with a comment marker ('#')
-  will be ignored. An empty whitelist implies that all source paths are mounted
-  read-only. An empty rw_whitelist_config argument implies that all source
-  paths are mounted read/write.
-
-  Args:
-    rw_whitelist_config: A string path to a read/write whitelist file.
-
-  Returns:
-    A set of whitelist path strings.
-  """
-  if not rw_whitelist_config:
-    return None
-
-  if not os.path.exists(rw_whitelist_config):
-    return None
-
-  ret = set()
-  with open(rw_whitelist_config, 'r') as f:
-    for p in f.read().splitlines():
-      p = p.strip()
-      if not p or p.startswith('#'):
-        continue
-      ret.add(p)
-
-  return ret
-
 
 def run(command,
-        android_target,
+        build_target,
         nsjail_bin,
         chroot,
         overlay_config=None,
-        rw_whitelist_config=None,
         source_dir=os.getcwd(),
-        out_dirname_for_whiteout=None,
         dist_dir=None,
         build_id=None,
         out_dir = None,
@@ -98,22 +65,20 @@
         dry_run=False,
         quiet=False,
         env=[],
+        nsjail_wrapper=[],
         stdout=None,
-        stderr=None):
+        stderr=None,
+        allow_network=False):
   """Run inside an NsJail sandbox.
 
   Args:
     command: A list of strings with the command to run.
-    android_target: A string with the name of the target to be prepared
+    build_target: A string with the name of the build target to be prepared
       inside the container.
     nsjail_bin: A string with the path to the nsjail binary.
     chroot: A string with the path to the chroot.
     overlay_config: A string path to an overlay configuration file.
-    rw_whitelist_config: A string path to a read/write whitelist configuration file.
     source_dir: A string with the path to the Android platform source.
-    out_dirname_for_whiteout: The optional name of the folder within
-      source_dir that is the Android build out folder *as seen from outside
-      the Docker container*.
     dist_dir: A string with the path to the dist directory.
     build_id: A string with the build identifier.
     out_dir: An optional path to the Android build out folder.
@@ -130,11 +95,13 @@
     quiet: If true, the function will not display the command and
       will pass -quiet argument to nsjail
     env: An array of environment variables to define in the jail in the `var=val` syntax.
+    nsjail_wrapper: A list of strings used to wrap the nsjail command.
     stdout: the standard output for all printed messages. Valid values are None, a file
       descriptor or file object. A None value means sys.stdout is used.
     stderr: the standard error for all printed messages. Valid values are None, a file
       descriptor or file object, and subprocess.STDOUT (which indicates that all stderr
       should be redirected to stdout). A None value means sys.stderr is used.
+    allow_network: If True, allow access to the host network.
 
   Returns:
     A list of strings with the command executed.
@@ -143,13 +110,11 @@
 
   nsjail_command = get_command(
       command=command,
-      android_target=android_target,
+      build_target=build_target,
       nsjail_bin=nsjail_bin,
       chroot=chroot,
-      overlay_config=overlay_config,
-      rw_whitelist_config=rw_whitelist_config,
+      cfg=config.factory(overlay_config),
       source_dir=source_dir,
-      out_dirname_for_whiteout=out_dirname_for_whiteout,
       dist_dir=dist_dir,
       build_id=build_id,
       out_dir=out_dir,
@@ -161,7 +126,9 @@
       readonly_bind_mounts=readonly_bind_mounts,
       extra_nsjail_args=extra_nsjail_args,
       quiet=quiet,
-      env=env)
+      env=env,
+      nsjail_wrapper=nsjail_wrapper,
+      allow_network=allow_network)
 
   run_command(
       nsjail_command=nsjail_command,
@@ -174,13 +141,11 @@
   return nsjail_command
 
 def get_command(command,
-        android_target,
+        build_target,
         nsjail_bin,
         chroot,
-        overlay_config=None,
-        rw_whitelist_config=None,
+        cfg=None,
         source_dir=os.getcwd(),
-        out_dirname_for_whiteout=None,
         dist_dir=None,
         build_id=None,
         out_dir = None,
@@ -192,21 +157,19 @@
         readonly_bind_mounts=[],
         extra_nsjail_args=[],
         quiet=False,
-        env=[]):
+        env=[],
+        nsjail_wrapper=[],
+        allow_network=False):
   """Get command to run nsjail sandbox.
 
   Args:
     command: A list of strings with the command to run.
-    android_target: A string with the name of the target to be prepared
+    build_target: A string with the name of the build target to be prepared
       inside the container.
     nsjail_bin: A string with the path to the nsjail binary.
     chroot: A string with the path to the chroot.
-    overlay_config: A string path to an overlay configuration file.
-    rw_whitelist_config: A string path to a read/write whitelist configuration file.
+    cfg: A config.Config instance or None.
     source_dir: A string with the path to the Android platform source.
-    out_dirname_for_whiteout: The optional name of the folder within
-      source_dir that is the Android build out folder *as seen from outside
-      the Docker container*.
     dist_dir: A string with the path to the dist directory.
     build_id: A string with the build identifier.
     out_dir: An optional path to the Android build out folder.
@@ -220,6 +183,7 @@
     quiet: If true, the function will not display the command and
       will pass -quiet argument to nsjail
     env: An array of environment variables to define in the jail in the `var=val` syntax.
+    allow_network: If True, allow access to the host network.
 
   Returns:
     A list of strings with the command to execute.
@@ -248,7 +212,7 @@
       raise ValueError('error: the provided meta_android_dir is not a path'
           'relative to meta_root_dir.')
 
-  nsjail_command = [nsjail_bin,
+  nsjail_command = nsjail_wrapper + [nsjail_bin,
     '--env', 'USER=nobody',
     '--config', config_file]
 
@@ -271,8 +235,6 @@
     nsjail_command.append('--quiet')
 
   whiteout_list = set()
-  if out_dirname_for_whiteout:
-    whiteout_list.add(os.path.join(source_dir, out_dirname_for_whiteout))
   if out_dir and (
       os.path.dirname(out_dir) == source_dir) and (
       os.path.basename(out_dir) != 'out'):
@@ -280,36 +242,34 @@
     if not os.path.exists(out_dir):
       os.makedirs(out_dir)
 
-  rw_whitelist = load_rw_whitelist(rw_whitelist_config)
-
-  # Apply the overlay for the selected Android target to the source
-  # directory if an overlay configuration was provided
-  if overlay_config and os.path.exists(overlay_config):
-    overlay = BindOverlay(android_target,
+  # Apply the overlay for the selected Android target to the source directory
+  # from the supplied config.Config instance (which may be None).
+  if cfg is not None:
+    overlay = BindOverlay(build_target,
                       source_dir,
-                      overlay_config,
+                      cfg,
                       whiteout_list,
                       _SOURCE_MOUNT_POINT,
-                      rw_whitelist)
+                      quiet=quiet)
     bind_mounts = overlay.GetBindMounts()
   else:
     bind_mounts = collections.OrderedDict()
-    bind_mounts[_SOURCE_MOUNT_POINT] = BindMount(source_dir, False)
+    bind_mounts[_SOURCE_MOUNT_POINT] = BindMount(source_dir, False, False)
 
   if out_dir:
-    bind_mounts[_OUT_MOUNT_POINT] = BindMount(out_dir, False)
+    bind_mounts[_OUT_MOUNT_POINT] = BindMount(out_dir, False, False)
 
   if dist_dir:
-    bind_mounts[_DIST_MOUNT_POINT] = BindMount(dist_dir, False)
+    bind_mounts[_DIST_MOUNT_POINT] = BindMount(dist_dir, False, False)
     nsjail_command.extend([
         '--env', 'DIST_DIR=%s'%_DIST_MOUNT_POINT
     ])
 
   if meta_root_dir:
-    bind_mounts[_META_MOUNT_POINT] = BindMount(meta_root_dir, False)
-    bind_mounts[os.path.join(_META_MOUNT_POINT, meta_android_dir)] = BindMount(source_dir, False)
+    bind_mounts[_META_MOUNT_POINT] = BindMount(meta_root_dir, False, False)
+    bind_mounts[os.path.join(_META_MOUNT_POINT, meta_android_dir)] = BindMount(source_dir, False, False)
     if out_dir:
-      bind_mounts[os.path.join(_META_MOUNT_POINT, meta_android_dir, 'out')] = BindMount(out_dir, False)
+      bind_mounts[os.path.join(_META_MOUNT_POINT, meta_android_dir, 'out')] = BindMount(out_dir, False, False)
 
   for bind_destination, bind_mount in bind_mounts.items():
     if bind_mount.readonly:
@@ -340,6 +300,11 @@
   for var in env:
     nsjail_command.extend(['--env', var])
 
+  if allow_network:
+    nsjail_command.extend(['--disable_clone_newnet',
+                           '--bindmount_ro',
+                           '/etc/resolv.conf'])
+
   nsjail_command.extend(extra_nsjail_args)
 
   nsjail_command.append('--')
@@ -411,9 +376,6 @@
       '--overlay_config',
       help='Path to the overlay configuration file.')
   parser.add_argument(
-      '--rw_whitelist_config',
-      help='Path to the read/write whitelist configuration file.')
-  parser.add_argument(
       '--source_dir',
       default=os.getcwd(),
       help='Path to Android platform source to be mounted as /src.')
@@ -432,24 +394,12 @@
       'the Android build. This path must be relative to meta_root_dir. '
       'Defaults to \'%s\'' % _DEFAULT_META_ANDROID_DIR)
   parser.add_argument(
-      '--out_dirname_for_whiteout',
-      help='The optional name of the folder within source_dir that is the '
-      'Android build out folder *as seen from outside the Docker '
-      'container*.')
-  parser.add_argument(
-      '--whiteout',
-      action='append',
-      default=[],
-      help='Optional glob filter of directories to add to the whiteout. The '
-      'directories will not appear in the container. '
-      'Can be specified multiple times.')
-  parser.add_argument(
       '--command',
       default=_DEFAULT_COMMAND,
       help='Command to run after entering the NsJail.'
       'If not set then an interactive Bash shell will be launched')
   parser.add_argument(
-      '--android_target',
+      '--build_target',
       required=True,
       help='Android target selected for building')
   parser.add_argument(
@@ -502,6 +452,10 @@
       action='append',
       help='Specify an environment variable to the NSJail sandbox. Can be specified '
       'muliple times. Syntax: var_name=value')
+  parser.add_argument(
+      '--allow_network', action='store_true',
+      help='If provided, allow access to the host network. WARNING: Using this '
+      'flag exposes the network inside jail. Use only when needed.')
   return parser.parse_args()
 
 def run_with_args(args):
@@ -518,11 +472,9 @@
   run(chroot=args.chroot,
       nsjail_bin=args.nsjail_bin,
       overlay_config=args.overlay_config,
-      rw_whitelist_config=args.rw_whitelist_config,
       source_dir=args.source_dir,
       command=args.command.split(),
-      android_target=args.android_target,
-      out_dirname_for_whiteout=args.out_dirname_for_whiteout,
+      build_target=args.build_target,
       dist_dir=args.dist_dir,
       build_id=args.build_id,
       out_dir=args.out_dir,
@@ -534,7 +486,8 @@
       readonly_bind_mounts=args.bindmount_ro,
       dry_run=args.dry_run,
       quiet=args.quiet,
-      env=args.env)
+      env=args.env,
+      allow_network=args.allow_network)
 
 def main():
   run_with_args(parse_args())
diff --git a/build/sandbox/nsjail_test.py b/build/sandbox/nsjail_test.py
index cfe4303..a73bbdb 100644
--- a/build/sandbox/nsjail_test.py
+++ b/build/sandbox/nsjail_test.py
@@ -32,7 +32,7 @@
         chroot='/chroot',
         source_dir='/source_dir',
         command=['/bin/bash'],
-        android_target='target_name',
+        build_target='target_name',
         dry_run=True)
     self.assertEqual(
         commands,
@@ -53,7 +53,7 @@
         chroot='/chroot',
         source_dir='/source_dir',
         command=['/bin/bash'],
-        android_target='target_name',
+        build_target='target_name',
         dry_run=True,
         meta_root_dir='/meta/dir',
         meta_android_dir='/android/dir')
@@ -65,7 +65,7 @@
           chroot='/chroot',
           source_dir='/source_dir',
           command=['/bin/bash'],
-          android_target='target_name',
+          build_target='target_name',
           stdout=out)
       out.seek(0)
       stdout = out.read()
@@ -81,7 +81,7 @@
           chroot='/chroot',
           source_dir='/source_dir',
           command=['/bin/bash'],
-          android_target='target_name')
+          build_target='target_name')
 
   def testDist(self):
     commands = nsjail.run(
@@ -89,7 +89,7 @@
         chroot='/chroot',
         source_dir='/source_dir',
         command=['/bin/bash'],
-        android_target='target_name',
+        build_target='target_name',
         dist_dir='/dist_dir',
         dry_run=True)
     self.assertEqual(
@@ -111,7 +111,7 @@
         chroot='/chroot',
         source_dir='/source_dir',
         command=['/bin/bash'],
-        android_target='target_name',
+        build_target='target_name',
         build_id='0',
         dry_run=True)
     self.assertEqual(
@@ -132,7 +132,7 @@
         chroot='/chroot',
         source_dir='/source_dir',
         command=['/bin/bash'],
-        android_target='target_name',
+        build_target='target_name',
         max_cpus=1,
         dry_run=True)
     self.assertEqual(
@@ -153,7 +153,7 @@
         chroot='/chroot',
         source_dir='/source_dir',
         command=['/bin/bash'],
-        android_target='target_name',
+        build_target='target_name',
         max_cpus=1,
         dry_run=True,
         env=['foo=bar', 'spam=eggs'])
diff --git a/build/sandbox/overlay.py b/build/sandbox/overlay.py
index ad298a6..873817e 100644
--- a/build/sandbox/overlay.py
+++ b/build/sandbox/overlay.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""Mounts all the projects required by a selected Android target.
+"""Mounts all the projects required by a selected build target.
 
 For details on how filesystem overlays work see the filesystem overlays
 section of the README.md.
@@ -27,8 +27,10 @@
 import subprocess
 import tempfile
 import xml.etree.ElementTree as ET
+from . import config
 
-BindMount = collections.namedtuple('BindMount', ['source_dir', 'readonly'])
+BindMount = collections.namedtuple(
+    'BindMount', ['source_dir', 'readonly', 'allows_replacement'])
 
 
 class BindOverlay(object):
@@ -57,11 +59,14 @@
       path: A string path to be checked.
 
     Returns:
-      A string of the conflicting path in the bind mounts.
-      None if there was no conflict found.
+      A tuple containing a string of the conflicting path in the bind mounts and
+      whether or not to allow this path to supersede any conflicts.
+      None, False if there was no conflict found.
     """
     conflict_path = None
+    allows_replacement = False
     for bind_destination, bind_mount in self._bind_mounts.items():
+      allows_replacement = bind_mount.allows_replacement
       # Check if the path is a subdir or the bind destination
       if path == bind_destination:
         conflict_path = bind_mount.source_dir
@@ -75,21 +80,30 @@
           conflict_path = path_in_source
           break
 
-    return conflict_path
+    return conflict_path, allows_replacement
 
-  def _AddOverlay(self, overlay_dir, intermediate_work_dir, skip_subdirs,
-                  destination_dir, rw_whitelist):
+  def _AddOverlay(self, source_dir, overlay_dir, intermediate_work_dir,
+                  skip_subdirs, allowed_projects, destination_dir,
+                  allowed_read_write, contains_read_write,
+                  is_replacement_allowed):
     """Adds a single overlay directory.
 
     Args:
+      source_dir: A string with the path to the Android platform source.
       overlay_dir: A string path to the overlay directory to apply.
       intermediate_work_dir: A string path to the intermediate work directory used as the
         base for constructing the overlay filesystem.
       skip_subdirs: A set of string paths to skip from overlaying.
+      allowed_projects: If not None, any .git project path not in this list
+        is excluded from overlaying.
       destination_dir: A string with the path to the source with the overlays
         applied to it.
-      rw_whitelist: An optional set of source paths to bind mount with
-        read/write access.
+      allowed_read_write: A function returns true if the path input should
+        be allowed read/write access.
+      contains_read_write: A function returns true if the path input contains
+        a sub-path that should be allowed read/write access.
+      is_replacement_allowed: A function returns true if the path can replace a
+        subsequent path.
     """
     # Traverse the overlay directory twice
     # The first pass only process git projects
@@ -110,15 +124,18 @@
       current_dir_destination = os.path.normpath(
         os.path.join(destination_dir, current_dir_relative))
 
-      if '.git' in subdirs:
+      if '.git' in subdirs or '.git' in files or '.bindmount' in files:
         # The current dir is a git project
         # so just bind mount it
         del subdirs[:]
 
-        if rw_whitelist is None or current_dir_origin in rw_whitelist:
-          self._AddBindMount(current_dir_origin, current_dir_destination, False)
-        else:
-          self._AddBindMount(current_dir_origin, current_dir_destination, True)
+        if '.bindmount' in files or (not allowed_projects or
+            os.path.relpath(current_dir_origin, source_dir) in allowed_projects):
+            self._AddBindMount(
+                current_dir_origin, current_dir_destination,
+                False if allowed_read_write(current_dir_origin) else True,
+                is_replacement_allowed(
+                    os.path.basename(overlay_dir), current_dir_relative))
 
         current_dir_ancestor = current_dir_origin
         while current_dir_ancestor and current_dir_ancestor not in dirs_with_git_projects:
@@ -132,7 +149,7 @@
         del subdirs[:]
         continue
 
-      if '.git' in subdirs:
+      if '.git' in subdirs or '.git' in files or '.bindmount' in files:
         del subdirs[:]
         continue
 
@@ -140,37 +157,52 @@
       current_dir_destination = os.path.normpath(
         os.path.join(destination_dir, current_dir_relative))
 
+      bindCurrentDir = True
+
+      # Directories with git projects can't be bind mounted
+      # because git projects are individually mounted
       if current_dir_origin in dirs_with_git_projects:
-        # Symbolic links to subdirectories
-        # have to be copied to the intermediate work directory.
-        # We can't bind mount them because bind mounts deference
-        # symbolic links, and the build system filters out any
-        # directory symbolic links.
-        for subdir in subdirs:
-          subdir_origin = os.path.join(current_dir_origin, subdir)
-          if os.path.islink(subdir_origin):
-            if subdir_origin not in skip_subdirs:
-              subdir_destination = os.path.join(intermediate_work_dir,
-                  current_dir_relative, subdir)
-              self._CopyFile(subdir_origin, subdir_destination)
+        bindCurrentDir = False
 
-        # bind each file individually then keep travesting
-        for file in files:
-          file_origin = os.path.join(current_dir_origin, file)
-          file_destination = os.path.join(current_dir_destination, file)
-          if rw_whitelist is None or file_origin in rw_whitelist:
-            self._AddBindMount(file_origin, file_destination, False)
-          else:
-            self._AddBindMount(file_origin, file_destination, True)
+      # A directory that contains read-write paths should only
+      # ever be bind mounted if the directory itself is read-write
+      if contains_read_write(current_dir_origin) and not allowed_read_write(current_dir_origin):
+        bindCurrentDir = False
 
-      else:
-        # The current dir does not have any git projects to it can be bind
-        # mounted wholesale
+      if bindCurrentDir:
+        # The current dir can be bind mounted wholesale
         del subdirs[:]
-        if rw_whitelist is None or current_dir_origin in rw_whitelist:
+        if allowed_read_write(current_dir_origin):
           self._AddBindMount(current_dir_origin, current_dir_destination, False)
         else:
           self._AddBindMount(current_dir_origin, current_dir_destination, True)
+        continue
+
+      # If we've made it this far then we're going to process
+      # each file and subdir individually
+
+      for subdir in subdirs:
+        subdir_origin = os.path.join(current_dir_origin, subdir)
+        # Symbolic links to subdirectories
+        # have to be copied to the intermediate work directory.
+        # We can't bind mount them because bind mounts dereference
+        # symbolic links, and the build system filters out any
+        # directory symbolic links.
+        if os.path.islink(subdir_origin):
+          if subdir_origin not in skip_subdirs:
+            subdir_destination = os.path.join(intermediate_work_dir,
+                current_dir_relative, subdir)
+            self._CopyFile(subdir_origin, subdir_destination)
+
+      # bind each file individually then keep traversing
+      for file in files:
+        file_origin = os.path.join(current_dir_origin, file)
+        file_destination = os.path.join(current_dir_destination, file)
+        if allowed_read_write(file_origin):
+          self._AddBindMount(file_origin, file_destination, False)
+        else:
+          self._AddBindMount(file_origin, file_destination, True)
+
 
   def _AddArtifactDirectories(self, source_dir, destination_dir, skip_subdirs):
     """Add directories that were not synced as workspace source.
@@ -203,13 +235,14 @@
     if os.path.exists(repo_origin):
       repo_destination = os.path.normpath(
         os.path.join(destination_dir, '.repo'))
-      self._AddBindMount(repo_origin, repo_destination, False)
+      self._AddBindMount(repo_origin, repo_destination, True)
       skip_subdirs.add(repo_origin)
 
     return skip_subdirs
 
   def _AddOverlays(self, source_dir, overlay_dirs, destination_dir,
-                   skip_subdirs, rw_whitelist):
+                   skip_subdirs, allowed_projects, allowed_read_write,
+                   contains_read_write, is_replacement_allowed):
     """Add the selected overlay directories.
 
     Args:
@@ -219,8 +252,14 @@
       destination_dir: A string with the path to the source where the overlays
         will be applied.
       skip_subdirs: A set of string paths to be skipped from overlays.
-      rw_whitelist: An optional set of source paths to bind mount with
-        read/write access.
+      allowed_projects: If not None, any .git project path not in this list
+        is excluded from overlaying.
+      allowed_read_write: A function returns true if the path input should
+        be allowed read/write access.
+      contains_read_write: A function returns true if the path input contains
+        a sub-path that should be allowed read/write access.
+      is_replacement_allowed: A function returns true if the path can replace a
+        subsequent path.
     """
 
     # Create empty intermediate workdir
@@ -235,7 +274,8 @@
     # depth first traversal algorithm.
     #
     # The algorithm described works under the condition that the overlaid file
-    # systems do not have conflicting projects.
+    # systems do not have conflicting projects or that the conflict path is
+    # specifically called-out as a replacement path.
     #
     # The results of attempting to overlay two git projects on top
     # of each other are unpredictable and may push the limits of bind mounts.
@@ -243,11 +283,17 @@
     skip_subdirs.add(os.path.join(source_dir, 'overlays'))
 
     for overlay_dir in overlay_dirs:
-      self._AddOverlay(overlay_dir, intermediate_work_dir,
-                       skip_subdirs, destination_dir, rw_whitelist)
+      self._AddOverlay(source_dir, overlay_dir, intermediate_work_dir,
+                       skip_subdirs, allowed_projects, destination_dir,
+                       allowed_read_write, contains_read_write,
+                       is_replacement_allowed)
 
 
-  def _AddBindMount(self, source_dir, destination_dir, readonly=False):
+  def _AddBindMount(self,
+                    source_dir,
+                    destination_dir,
+                    readonly=False,
+                    allows_replacement=False):
     """Adds a bind mount for the specified directory.
 
     Args:
@@ -258,18 +304,21 @@
         it will be created.
       readonly: A flag to indicate whether this path should be bind mounted
         with read-only access.
+      allows_replacement: A flag to indicate whether this path is allowed to replace a
+        conflicting path.
     """
-    conflict_path = self._FindBindMountConflict(destination_dir)
-    if conflict_path:
+    conflict_path, replacement = self._FindBindMountConflict(destination_dir)
+    if conflict_path and not replacement:
       raise ValueError("Project %s could not be overlaid at %s "
         "because it conflicts with %s"
         % (source_dir, destination_dir, conflict_path))
-
-    if len(self._bind_mounts) >= self.MAX_BIND_MOUNTS:
-      raise ValueError("Bind mount limit of %s reached" % self.MAX_BIND_MOUNTS)
-
-    self._bind_mounts[destination_dir] = BindMount(
-        source_dir=source_dir, readonly=readonly)
+    elif not conflict_path:
+      if len(self._bind_mounts) >= self.MAX_BIND_MOUNTS:
+        raise ValueError("Bind mount limit of %s reached" % self.MAX_BIND_MOUNTS)
+      self._bind_mounts[destination_dir] = BindMount(
+          source_dir=source_dir,
+          readonly=readonly,
+          allows_replacement=allows_replacement)
 
   def _CopyFile(self, source_path, dest_path):
     """Copies a file to the specified destination.
@@ -295,28 +344,116 @@
     """
     return self._bind_mounts
 
+  def _GetReadWriteFunction(self, build_config, source_dir):
+    """Returns a function that tells you how to mount a path.
+
+    Args:
+      build_config: A config.BuildConfig instance of the build target to be
+                    prepared.
+      source_dir: A string with the path to the Android platform source.
+
+    Returns:
+      A function that takes a string path as an input and returns
+      True if the path should be mounted read-write or False if
+      the path should be mounted read-only.
+    """
+
+    # The read/write allowlist provides paths relative to the source dir. It
+    # needs to be updated with absolute paths to make lookup possible.
+    rw_allowlist = {os.path.join(source_dir, p) for p in build_config.allow_readwrite}
+
+    def AllowReadWrite(path):
+      return build_config.allow_readwrite_all or path in rw_allowlist
+
+    return AllowReadWrite
+
+  def _GetContainsReadWriteFunction(self, build_config, source_dir):
+    """Returns a function that tells you if a directory contains a read-write dir
+
+    Args:
+      build_config: A config.BuildConfig instance of the build target to be
+                    prepared.
+      source_dir: A string with the path to the Android platform source.
+
+    Returns:
+      A function that takes a string path as an input and returns
+      True if the path contains a read-write path
+    """
+
+    # Get all dirs with allowed read-write
+    # and all their ancestor directories
+    contains_rw = set()
+    for path in build_config.allow_readwrite:
+      while path not in ["", "/"]:
+      # The read/write allowlist provides paths relative to the source dir. It
+      # needs to be updated with absolute paths to make lookup possible.
+        contains_rw.add(os.path.join(source_dir, path))
+        path = os.path.dirname(path)
+
+    def ContainsReadWrite(path):
+      return build_config.allow_readwrite_all or path in contains_rw
+
+    return ContainsReadWrite
+
+  def _GetAllowedProjects(self, build_config):
+    """Returns a set of paths that are allowed to contain .git projects.
+
+    Args:
+      build_config: A config.BuildConfig instance of the build target to be
+                    prepared.
+
+    Returns:
+      If the target has an allowed projects file: a set of paths. Any .git
+        project path not in this set should be excluded from overlaying.
+      Otherwise: None
+    """
+    if not build_config.allowed_projects_file:
+      return None
+    allowed_projects = ET.parse(build_config.allowed_projects_file)
+    paths = set()
+    for child in allowed_projects.getroot().findall("project"):
+      paths.add(child.attrib.get("path", child.attrib["name"]))
+    return paths
+
+  def _IsReplacementAllowedFunction(self, build_config):
+    """Returns a function to determine if a given path is replaceable.
+
+    Args:
+      build_config: A config.BuildConfig instance of the build target to be
+                    prepared.
+
+    Returns:
+      A function that takes an overlay name and string path as input and
+      returns True if the path is replaceable.
+    """
+    def is_replacement_allowed_func(overlay_name, path):
+      for overlay in build_config.overlays:
+        if overlay_name == overlay.name and path in overlay.replacement_paths:
+          return True
+      return False
+
+    return is_replacement_allowed_func
+
   def __init__(self,
-               target,
+               build_target,
                source_dir,
-               config_file,
+               cfg,
                whiteout_list = [],
                destination_dir=None,
-               rw_whitelist=None):
+               quiet=False):
     """Inits Overlay with the details of what is going to be overlaid.
 
     Args:
-      target: A string with the name of the target to be prepared.
+      build_target: A string with the name of the build target to be prepared.
       source_dir: A string with the path to the Android platform source.
-      config_file: A string path to the XML config file.
+      cfg: A config.Config instance.
       whiteout_list: A list of directories to hide from the build system.
       destination_dir: A string with the path where the overlay filesystem
         will be created. If none is provided, the overlay filesystem
         will be applied directly on top of source_dir.
-      rw_whitelist: An optional set of source paths to bind mount with
-        read/write access. If none is provided, all paths will be mounted with
-        read/write access. If the set is empty, all paths will be mounted
-        read-only.
+      quiet: A boolean that, when True, suppresses debug output.
     """
+    self._quiet = quiet
 
     if not destination_dir:
       destination_dir = source_dir
@@ -330,121 +467,35 @@
     # seems appropriate
     skip_subdirs = set(whiteout_list)
 
-    # The read/write whitelist provids paths relative to the source dir. It
-    # needs to be updated with absolute paths to make lookup possible.
-    if rw_whitelist:
-      rw_whitelist = {os.path.join(source_dir, p) for p in rw_whitelist}
+    build_config = cfg.get_build_config(build_target)
+
+    allowed_read_write = self._GetReadWriteFunction(build_config, source_dir)
+    contains_read_write = self._GetContainsReadWriteFunction(build_config, source_dir)
+    allowed_projects = self._GetAllowedProjects(build_config)
+    is_replacement_allowed = self._IsReplacementAllowedFunction(build_config)
 
     overlay_dirs = []
-    overlay_map = get_overlay_map(config_file)
-    for overlay_dir in overlay_map[target]:
-      overlay_dir = os.path.join(source_dir, 'overlays', overlay_dir)
+    for overlay in build_config.overlays:
+      overlay_dir = os.path.join(source_dir, 'overlays', overlay.name)
       overlay_dirs.append(overlay_dir)
 
     self._AddOverlays(
-        source_dir, overlay_dirs, destination_dir, skip_subdirs, rw_whitelist)
+        source_dir, overlay_dirs, destination_dir,
+        skip_subdirs, allowed_projects, allowed_read_write, contains_read_write,
+        is_replacement_allowed)
 
     # If specified for this target, create a custom filesystem view
-    fs_view_map = get_fs_view_map(config_file)
-    if target in fs_view_map:
-      for path_relative_from, path_relative_to in fs_view_map[target]:
-        path_from = os.path.join(source_dir, path_relative_from)
-        if os.path.isfile(path_from) or os.path.isdir(path_from):
-          path_to = os.path.join(destination_dir, path_relative_to)
-          if rw_whitelist is None or path_from in rw_whitelist:
-            self._AddBindMount(path_from, path_to, False)
-          else:
-            self._AddBindMount(path_from, path_to, True)
+    for path_relative_from, path_relative_to in build_config.views:
+      path_from = os.path.join(source_dir, path_relative_from)
+      if os.path.isfile(path_from) or os.path.isdir(path_from):
+        path_to = os.path.join(destination_dir, path_relative_to)
+        if allowed_read_write(path_from):
+          self._AddBindMount(path_from, path_to, False)
         else:
-          raise ValueError("Path '%s' must be a file or directory" % path_from)
+          self._AddBindMount(path_from, path_to, True)
+      else:
+        raise ValueError("Path '%s' must be a file or directory" % path_from)
 
     self._overlay_dirs = overlay_dirs
-    print('Applied overlays ' + ' '.join(self._overlay_dirs))
-
-  def __del__(self):
-    """Cleans up Overlay.
-    """
-    if self._overlay_dirs:
-      print('Stripped out overlay ' + ' '.join(self._overlay_dirs))
-
-def get_config(config_file):
-  """Parses the overlay configuration file.
-
-  Args:
-    config_file: A string path to the XML config file.
-
-  Returns:
-    A root config XML Element.
-    None if there is no config file.
-  """
-  config = None
-  if os.path.exists(config_file):
-    tree = ET.parse(config_file)
-    config = tree.getroot()
-  return config
-
-def get_overlay_map(config_file):
-  """Retrieves the map of overlays for each target.
-
-  Args:
-    config_file: A string path to the XML config file.
-
-  Returns:
-    A dict of keyed by target name. Each value in the
-    dict is a list of overlay names corresponding to
-    the target.
-  """
-  overlay_map = {}
-  config = get_config(config_file)
-  # The presence of the config file is optional
-  if config:
-    for target in config.findall('target'):
-      name = target.get('name')
-      overlay_list = [o.get('name') for o in target.findall('overlay')]
-      overlay_map[name] = overlay_list
-    # A valid configuration file is required
-    # to have at least one overlay target
-    if not overlay_map:
-      raise ValueError('Error: the overlay configuration file '
-          'is missing at least one overlay target')
-
-  return overlay_map
-
-def get_fs_view_map(config_file):
-  """Retrieves the map of filesystem views for each target.
-
-  Args:
-    config_file: A string path to the XML config file.
-
-  Returns:
-    A dict of filesystem views keyed by target name.
-    A filesystem view is a list of (source, destination)
-    string path tuples.
-  """
-  fs_view_map = {}
-  config = get_config(config_file)
-
-  # The presence of the config file is optional
-  if config:
-    # A valid config file is not required to
-    # include FS Views, only overlay targets
-    views = {}
-    for view in config.findall('view'):
-      name = view.get('name')
-      paths = []
-      for path in view.findall('path'):
-        paths.append((
-              path.get('source'),
-              path.get('destination')))
-      views[name] = paths
-
-    for target in config.findall('target'):
-      target_name = target.get('name')
-      view_paths = []
-      for view in target.findall('view'):
-        view_paths.extend(views[view.get('name')])
-
-      if view_paths:
-        fs_view_map[target_name] = view_paths
-
-  return fs_view_map
+    if not self._quiet:
+      print('Applied overlays ' + ' '.join(self._overlay_dirs))
diff --git a/build/sandbox/overlay_test.py b/build/sandbox/overlay_test.py
index 8e93cf7..088dda6 100644
--- a/build/sandbox/overlay_test.py
+++ b/build/sandbox/overlay_test.py
@@ -23,6 +23,7 @@
 import subprocess
 import tempfile
 import unittest
+from . import config
 from . import overlay
 import re
 
@@ -32,9 +33,44 @@
   def setUp(self):
     self.source_dir = tempfile.mkdtemp()
     self.destination_dir = tempfile.mkdtemp()
+    #
+    # base_dir/
+    #   base_project/
+    #     .git
+    # no_git_dir/
+    #   no_git_subdir1/
+    #     no_git_file1
+    #   no_git_subdir2/
+    #     no_git_file2
+    # overlays/
+    #   unittest1/
+    #     from_dir/
+    #       .git/
+    #     upper_subdir/
+    #       lower_subdir/
+    #         from_unittest1/
+    #           .git/
+    #     from_file
+    #   unittest2/
+    #     upper_subdir/
+    #       lower_subdir/
+    #         from_unittest2/
+    #           .git/
+    #   no_git_dir2/
+    #     no_git_subdir1/
+    #     no_git_subdir2/
+    #       .bindmount
+    #
     os.mkdir(os.path.join(self.source_dir, 'base_dir'))
     os.mkdir(os.path.join(self.source_dir, 'base_dir', 'base_project'))
     os.mkdir(os.path.join(self.source_dir, 'base_dir', 'base_project', '.git'))
+    os.mkdir(os.path.join(self.source_dir, 'no_git_dir'))
+    os.mkdir(os.path.join(self.source_dir, 'no_git_dir', 'no_git_subdir1'))
+    open(os.path.join(self.source_dir,
+                      'no_git_dir', 'no_git_subdir1', 'no_git_file1'), 'a').close()
+    os.mkdir(os.path.join(self.source_dir, 'no_git_dir', 'no_git_subdir2'))
+    open(os.path.join(self.source_dir,
+                      'no_git_dir', 'no_git_subdir2', 'no_git_file2'), 'a').close()
     os.mkdir(os.path.join(self.source_dir, 'overlays'))
     os.mkdir(os.path.join(self.source_dir,
                           'overlays', 'unittest1'))
@@ -75,6 +111,15 @@
                           'overlays', 'unittest2', 'upper_subdir',
                           'lower_subdir', 'from_unittest2', '.git'))
 
+    os.mkdir(os.path.join(self.source_dir, 'overlays', 'no_git_dir2'))
+    os.mkdir(os.path.join(self.source_dir,
+                          'overlays', 'no_git_dir2', 'no_git_subdir1'))
+    os.mkdir(os.path.join(self.source_dir,
+                          'overlays', 'no_git_dir2', 'no_git_subdir2'))
+    open(os.path.join(self.source_dir,
+                      'overlays', 'no_git_dir2', 'no_git_subdir2', '.bindmount'),
+         'a').close()
+
   def tearDown(self):
     shutil.rmtree(self.source_dir)
 
@@ -85,19 +130,54 @@
         '<config>'
         '  <target name="unittest">'
         '    <overlay name="unittest1"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
         '  </target>'
         '</config>'
         )
       test_config.flush()
       o = overlay.BindOverlay(
-          config_file=test_config.name,
-          target='unittest',
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
           source_dir=self.source_dir)
     self.assertIsNotNone(o)
     bind_mounts = o.GetBindMounts()
     bind_source = os.path.join(self.source_dir, 'overlays/unittest1/from_dir')
     bind_destination = os.path.join(self.source_dir, 'from_dir')
-    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, False))
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
+    self.assertIn(os.path.join(self.source_dir, 'base_dir', 'base_project'), bind_mounts)
+
+  def testValidTargetOverlayBindsAllowedProjects(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config, \
+        tempfile.NamedTemporaryFile('w+t') as test_allowed_projects:
+      test_config.write(
+        '<?xml version="1.0" encoding="UTF-8" ?>'
+        '<config>'
+        '  <target name="unittest">'
+        '    <overlay name="unittest1"/>'
+        '    <build_config allowed_projects_file="%s">'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
+        '  </target>'
+        '</config>' % test_allowed_projects.name
+        )
+      test_config.flush()
+      test_allowed_projects.write(
+        '<?xml version="1.0" encoding="UTF-8" ?>'
+        '<manifest>'
+        '  <project name="from_dir" path="overlays/unittest1/from_dir"/>'
+        '</manifest>'
+        )
+      test_allowed_projects.flush()
+      o = overlay.BindOverlay(
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
+          source_dir=self.source_dir)
+    self.assertIsNotNone(o)
+    bind_mounts = o.GetBindMounts()
+    self.assertIn(os.path.join(self.source_dir, 'from_dir'), bind_mounts)
+    self.assertNotIn(os.path.join(self.source_dir, 'base_dir', 'base_project'), bind_mounts)
 
   def testMultipleOverlays(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
@@ -107,27 +187,30 @@
         '  <target name="unittest">'
         '    <overlay name="unittest1"/>'
         '    <overlay name="unittest2"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
         '  </target>'
         '</config>'
         )
       test_config.flush()
       o = overlay.BindOverlay(
-          config_file=test_config.name,
-          target='unittest',
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
           source_dir=self.source_dir)
     self.assertIsNotNone(o)
     bind_mounts = o.GetBindMounts()
     bind_source = os.path.join(self.source_dir,
       'overlays/unittest1/upper_subdir/lower_subdir/from_unittest1')
     bind_destination = os.path.join(self.source_dir, 'upper_subdir/lower_subdir/from_unittest1')
-    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, False))
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
     bind_source = os.path.join(self.source_dir,
       'overlays/unittest2/upper_subdir/lower_subdir/from_unittest2')
     bind_destination = os.path.join(self.source_dir,
       'upper_subdir/lower_subdir/from_unittest2')
-    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, False))
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
 
-  def testMultipleOverlaysWithWhitelist(self):
+  def testMultipleOverlaysWithAllowlist(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
       test_config.write(
         '<?xml version="1.0" encoding="UTF-8" ?>'
@@ -135,14 +218,17 @@
         '  <target name="unittest">'
         '    <overlay name="unittest1"/>'
         '    <overlay name="unittest2"/>'
+        '    <allow_readwrite path="overlays/unittest1/upper_subdir/lower_subdir/from_unittest1"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
         '  </target>'
         '</config>'
         )
       test_config.flush()
-      rw_whitelist = set('overlays/unittest1/uppser_subdir/lower_subdir/from_unittest1')
       o = overlay.BindOverlay(
-          config_file=test_config.name,
-          target='unittest',
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
           source_dir=self.source_dir)
     self.assertIsNotNone(o)
     bind_mounts = o.GetBindMounts()
@@ -151,12 +237,48 @@
     bind_destination = os.path.join(self.source_dir, 'upper_subdir/lower_subdir/from_unittest1')
     self.assertEqual(
         bind_mounts[bind_destination],
-        overlay.BindMount(source_dir=bind_source, readonly=False))
+        overlay.BindMount(source_dir=bind_source, readonly=False, allows_replacement=False))
     bind_source = os.path.join(self.source_dir,
       'overlays/unittest2/upper_subdir/lower_subdir/from_unittest2')
     bind_destination = os.path.join(self.source_dir,
       'upper_subdir/lower_subdir/from_unittest2')
-    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, False))
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
+
+  def testAllowReadWriteNoGitDir(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(
+        '<?xml version="1.0" encoding="UTF-8" ?>'
+        '<config>'
+        '  <target name="unittest">'
+        '    <overlay name="unittest1"/>'
+        '    <overlay name="unittest2"/>'
+        '    <allow_readwrite path="no_git_dir/no_git_subdir1"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
+        '  </target>'
+        '</config>'
+        )
+      test_config.flush()
+      o = overlay.BindOverlay(
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
+          source_dir=self.source_dir)
+    self.assertIsNotNone(o)
+    bind_mounts = o.GetBindMounts()
+    bind_source = os.path.join(self.source_dir,
+      'no_git_dir/no_git_subdir1')
+    bind_destination = os.path.join(self.source_dir, 'no_git_dir/no_git_subdir1')
+    self.assertIn(bind_destination, bind_mounts)
+    self.assertEqual(
+        bind_mounts[bind_destination],
+        overlay.BindMount(source_dir=bind_source, readonly=False, allows_replacement=False))
+    bind_source = os.path.join(self.source_dir,
+      'no_git_dir/no_git_subdir2')
+    bind_destination = os.path.join(self.source_dir,
+      'no_git_dir/no_git_subdir2')
+    self.assertIn(bind_destination, bind_mounts)
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
 
   def testValidOverlaidDir(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
@@ -165,20 +287,23 @@
         '<config>'
         '  <target name="unittest">'
         '    <overlay name="unittest1"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
         '  </target>'
         '</config>'
         )
       test_config.flush()
       o = overlay.BindOverlay(
-          config_file=test_config.name,
-          target='unittest',
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
           source_dir=self.source_dir,
           destination_dir=self.destination_dir)
     self.assertIsNotNone(o)
     bind_mounts = o.GetBindMounts()
     bind_source = os.path.join(self.source_dir, 'overlays/unittest1/from_dir')
     bind_destination = os.path.join(self.destination_dir, 'from_dir')
-    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, False))
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
 
   def testValidFilesystemViewDirectoryBind(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
@@ -187,6 +312,9 @@
         '<config>'
         '  <target name="unittest">'
         '    <view name="unittestview"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
         '  </target>'
         '  <view name="unittestview">'
         '    <path source="overlays/unittest1/from_dir" '
@@ -196,14 +324,14 @@
         )
       test_config.flush()
       o = overlay.BindOverlay(
-          config_file=test_config.name,
-          target='unittest',
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
           source_dir=self.source_dir)
     self.assertIsNotNone(o)
     bind_mounts = o.GetBindMounts()
     bind_source = os.path.join(self.source_dir, 'overlays/unittest1/from_dir')
     bind_destination = os.path.join(self.source_dir, 'to_dir')
-    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, False))
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
 
   def testValidFilesystemViewFileBind(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
@@ -212,6 +340,9 @@
           '<config>'
           '  <target name="unittest">'
           '    <view name="unittestview"/>'
+          '    <build_config>'
+          '      <goal name="goal_name"/>'
+          '    </build_config>'
           '  </target>'
           '  <view name="unittestview">'
           '    <path source="overlays/unittest1/from_file" '
@@ -221,14 +352,14 @@
           )
       test_config.flush()
       o = overlay.BindOverlay(
-          config_file=test_config.name,
-          target='unittest',
+          cfg=config.factory(test_config.name),
+          build_target='unittest',
           source_dir=self.source_dir)
     self.assertIsNotNone(o)
     bind_mounts = o.GetBindMounts()
     bind_source = os.path.join(self.source_dir, 'overlays/unittest1/from_file')
     bind_destination = os.path.join(self.source_dir, 'to_file')
-    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, False))
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
 
   def testInvalidTarget(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
@@ -237,16 +368,74 @@
         '<config>'
         '  <target name="unittest">'
         '    <overlay name="unittest1"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
         '  </target>'
         '</config>'
         )
       test_config.flush()
       with self.assertRaises(KeyError):
         overlay.BindOverlay(
-            config_file=test_config.name,
-            target='unknown',
+            cfg=config.factory(test_config.name),
+            build_target='unknown',
             source_dir=self.source_dir)
 
+  def testExplicitBindMount(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(
+        '<?xml version="1.0" encoding="UTF-8" ?>'
+        '<config>'
+        '  <target name="target_name">'
+        '    <overlay name="no_git_dir2"/>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
+        '  </target>'
+        '</config>'
+        )
+      test_config.flush()
+      o = overlay.BindOverlay(
+          cfg=config.factory(test_config.name),
+          build_target='target_name',
+          source_dir=self.source_dir)
+    self.assertIsNotNone(o)
+    bind_mounts = o.GetBindMounts()
+
+    bind_source = os.path.join(self.source_dir, 'overlays/no_git_dir2/no_git_subdir1')
+    bind_destination = os.path.join(self.source_dir, 'no_git_subdir1')
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
+
+    bind_source = os.path.join(self.source_dir, 'overlays/no_git_dir2/no_git_subdir2')
+    bind_destination = os.path.join(self.source_dir, 'no_git_subdir2')
+    self.assertEqual(bind_mounts[bind_destination], overlay.BindMount(bind_source, True, False))
+
+  def testReplacementPath(self):
+    with tempfile.NamedTemporaryFile('w+t') as test_config:
+      test_config.write(
+        '<?xml version="1.0" encoding="UTF-8" ?>'
+        '<config>'
+        '  <target name="unittest">'
+        '    <overlay name="unittest1">'
+        '     <replacement_path path="from_dir"/>'
+        '    </overlay>'
+        '    <build_config>'
+        '      <goal name="goal_name"/>'
+        '    </build_config>'
+        '  </target>'
+        '</config>'
+        )
+      test_config.flush()
+      o = overlay.BindOverlay(
+            cfg=config.factory(test_config.name),
+            build_target='unittest',
+            source_dir=self.source_dir)
+    self.assertIsNotNone(o)
+    bind_mounts = o.GetBindMounts()
+    bind_source = os.path.join(self.source_dir, 'overlays/unittest1/from_dir')
+    bind_destination = os.path.join(self.source_dir, 'from_dir')
+    self.assertEqual(bind_mounts[bind_destination],
+                     overlay.BindMount(bind_source, True, True))
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/build/sandbox/rbe.py b/build/sandbox/rbe.py
new file mode 100644
index 0000000..fba368f
--- /dev/null
+++ b/build/sandbox/rbe.py
@@ -0,0 +1,195 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utilities for RBE-enabled builds."""
+
+import os
+import random
+import subprocess
+import tempfile
+
+# These are the environment variables that control RBE usage with the
+# --use_rbe flag. If defined on the environment, the values will be
+# propagated to the build; otherwise, those defaults will be used.
+TOOLS_DIR = 'prebuilts/remoteexecution-client/latest'
+_RBE_ENV = {
+    'USE_RBE': 'true',
+    'RBE_DIR': TOOLS_DIR,
+    'NINJA_REMOTE_NUM_JOBS': '500',
+    'FLAG_log_dir': 'out',
+    'FLAG_server_address': 'unix:///tmp/reproxy_%s.sock' % random.randint(0,100000),
+    'FLAG_exec_root': '/src',
+    'FLAG_invocation_id': 'treble-%s' % random.randint(0,100000),
+    'RBE_use_application_default_credentials': 'true',
+    'RBE_reproxy_wait_seconds': '20',
+    'RBE_output_dir': 'out',
+    'RBE_proxy_log_dir': 'out',
+    'RBE_cpp_dependency_scanner_plugin': os.path.join(TOOLS_DIR, 'dependency_scanner_go_plugin.so'),
+    'RBE_re_proxy': os.path.join(TOOLS_DIR, 'reproxy'),
+    'RBE_JAVAC': 'true',
+    'RBE_D8': 'true',
+    'RBE_R8': 'true',
+}
+
+
+def get_nsjail_bin_wrapper():
+  """Returns the command executed in a closed network namespace."""
+  return ['netns-exec', 'rbe-closed-ns']
+
+
+def env_array_to_dict(env_array):
+  """Converts an env var array to a dict.
+
+  Args:
+    env_array: An array of environment variables in the `var=val` syntax.
+
+  Returns:
+    A dict of string values keyed by string names.
+  """
+  env_dict = {}
+  for var in env_array:
+    var = var.split('=')
+    name = var[0]
+    value = var[1]
+    env_dict[name] = value
+  return env_dict
+
+def prepare_env(env):
+  """Prepares an env dict for enabling RBE.
+
+  Checks that all environment variables required to be set
+  by the user are defined and sets some default
+  values for optional environment variables
+
+  Args:
+    env: An array of environment variables in the `var=val` syntax.
+
+  Returns:
+    An array of environment variables in the `var=val` syntax.
+  """
+  # Start with the default values
+  prepared_env = _RBE_ENV.copy()
+
+  # Host environment variables take precedence over defaults.
+  for k,v in os.environ.items():
+    if k.startswith('RBE_'):
+      prepared_env[k] = v
+
+  # Input parameter variables take precedence over everything else
+  prepared_env.update(env_array_to_dict(env))
+
+  if 'RBE_instance' not in prepared_env:
+    raise EnvironmentError('The RBE_instance environment '
+                           'variables must be defined')
+
+  if 'RBE_service' not in prepared_env:
+    raise EnvironmentError('The RBE_service environment '
+                           'variables must be defined')
+
+  return ['%s=%s' % (k,v) for k,v in prepared_env.items()]
+
+
+def get_readonlybind_mounts():
+  """Returns a list of readonly bind mounts in `source:dest` syntax."""
+  creds_file = '.config/gcloud/application_default_credentials.json'
+  # Bind the gcloud credentials file, if present, to authenticate.
+  source_creds_file = os.path.join(os.getenv('HOME'), creds_file)
+  dest_creds_file = os.path.join('/tmp', creds_file)
+  if not os.path.exists(source_creds_file):
+    raise IOError('Required credentials file not found: ' + source_creds_file)
+  return ['%s:%s' % (source_creds_file, dest_creds_file)]
+
+
+def get_extra_nsjail_args():
+  """Returns a list of extra nsjail.run arguments for RBE."""
+  # The nsjail should be invoked in a closed network namespace.
+  return ['--disable_clone_newnet']
+
+
+def setup(env, build_log=subprocess.DEVNULL):
+  """Prerequisite for having RBE enabled for the build.
+
+  Calls RBE http proxy in a separate network namespace.
+
+  Args:
+    env: An array of environment variables in the `var=val` syntax.
+    build_log: a file handle to write executed commands to.
+
+  Returns:
+    A cleanup function to be called after the build is done.
+  """
+  env_dict = env_array_to_dict(env)
+
+  # Create the RBE http proxy allowlist file.
+  if 'RBE_service' in env_dict:
+    rbe_service = env_dict['RBE_service']
+  else:
+    rbe_service = os.getenv('RBE_service')
+  if not rbe_service:
+    raise EnvironmentError('The RBE_service environment '
+                           'variables must be defined')
+  if ':' in rbe_service:
+    rbe_service = rbe_service.split(':', 1)[0]
+  rbe_allowlist = [
+      rbe_service,
+      'oauth2.googleapis.com',
+      'accounts.google.com',
+  ]
+  with open('/tmp/rbe_allowlist.txt', 'w+') as t:
+    for w in rbe_allowlist:
+      t.write(w + '\n')
+
+  # Restart RBE http proxy.
+  script_dir = os.path.dirname(os.path.abspath(__file__))
+  proxy_kill_command = ['killall', 'tinyproxy']
+  port = 8000 + random.randint(0,1000)
+  new_conf_contents = ''
+  with open(os.path.join(script_dir, 'rbe_http_proxy.conf'), 'r') as base_conf:
+    new_conf_contents = base_conf.read()
+  with tempfile.NamedTemporaryFile(prefix='rbe_http_proxy_', mode='w', delete=False) as new_conf:
+    new_conf.write(new_conf_contents)
+    new_conf.write('\nPort %i\n' % port)
+    new_conf.close()
+  env.append("RBE_HTTP_PROXY=10.1.2.1:%i" % port)
+
+  proxy_command = [
+      'netns-exec', 'rbe-open-ns', 'tinyproxy', '-c', new_conf.name, '-d']
+  rbe_proxy_log = tempfile.NamedTemporaryFile(prefix='tinyproxy_', delete=False)
+  if build_log != subprocess.DEVNULL:
+    print('RBE http proxy restart commands:', file=build_log)
+    print(' '.join(proxy_kill_command), file=build_log)
+    print('cd ' + script_dir, file=build_log)
+    print(' '.join(proxy_command) + ' &> ' + rbe_proxy_log.name + ' &',
+          file=build_log)
+  subprocess.call(
+      proxy_kill_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+  rbe_proxy = subprocess.Popen(
+      proxy_command,
+      cwd=script_dir,
+      stdout=rbe_proxy_log,
+      stderr=rbe_proxy_log)
+
+  def cleanup():
+    """Should be called after an RBE build is done."""
+    if build_log != subprocess.DEVNULL:
+      print('RBE http proxy kill command:', file=build_log)
+      print(' '.join(proxy_kill_command), file=build_log)
+    rbe_proxy.terminate()
+    # TODO(diegowilson): Calling wait() sometimes dead locks.
+    # Not sure if it's a tinyproxy bug or the issue described in the wait() documentation
+    # https://docs.python.org/3/library/subprocess.html#subprocess.Popen.wait
+    # rbe_proxy.wait()
+    rbe_proxy_log.close()
+
+  return cleanup
diff --git a/build/sandbox/rbe_action.py b/build/sandbox/rbe_action.py
new file mode 100644
index 0000000..9748fdf
--- /dev/null
+++ b/build/sandbox/rbe_action.py
@@ -0,0 +1,83 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Runs a single action remotely with RBE."""
+
+import argparse
+import os
+import rbe
+import subprocess
+import sys
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      '--command',
+      default='echo RBE check successful.',
+      help='Command to run remotely with RBE.')
+  parser.add_argument(
+      '--print', '-p',
+      action='store_true',
+      help='Prints the executed commands')
+  args = parser.parse_args()
+
+  env = []
+  cleanup = rbe.setup(env, sys.stdout if args.print else subprocess.DEVNULL)
+  src_root = os.path.normpath(
+      os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../..'))
+  env = rbe.env_array_to_dict(rbe.prepare_env(env))
+  env['PATH'] = os.getenv('PATH')
+  for d in ['FLAG_log_dir', 'RBE_output_dir', 'RBE_proxy_log_dir']:
+    env[d] = '/tmp'  # We want the logs in /tmp instead of out.
+  try:
+    # Bootstrap the RBE proxy.
+    bootstrap_cmd = rbe.get_nsjail_bin_wrapper() + \
+      [os.path.join(rbe.TOOLS_DIR, 'bootstrap')]
+    shell_env = ' '.join(['%s=%s' % (k,v) for k, v in env.items()])
+    if args.print:
+      print('Bootstrap RBE reproxy:')
+      print('cd ' + src_root)
+      print('%s %s' % (shell_env, ' '.join(bootstrap_cmd)))
+    subprocess.check_call(
+        bootstrap_cmd, env=env, cwd=src_root, stdout=subprocess.DEVNULL)
+    # Execute the remote command.
+    rewrapper_cmd = rbe.get_nsjail_bin_wrapper() + [
+        os.path.join(rbe.TOOLS_DIR, 'rewrapper'),
+        '--platform=container-image=docker://gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62', \
+        '--labels=type=tool',
+        '--exec_strategy=remote',
+        '--dial_timeout=5s',
+        '--exec_root=' + src_root,
+        '--',
+    ] + args.command.split()
+    if args.print:
+      print('Run remote command with RBE:')
+      print('%s %s' % (shell_env, ' '.join(rewrapper_cmd)))
+    subprocess.check_call(rewrapper_cmd, env=env, cwd=src_root)
+  finally:
+    # Shut down the RBE proxy.
+    if args.print:
+      print('RBE proxy shutdown:')
+      print('killall reproxy')
+    subprocess.call(
+        ['killall', 'reproxy'],
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL)
+    cleanup()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/sandbox/rbe_http_proxy.conf b/build/sandbox/rbe_http_proxy.conf
new file mode 100644
index 0000000..474858d
--- /dev/null
+++ b/build/sandbox/rbe_http_proxy.conf
@@ -0,0 +1,40 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# IP to listen on.
+Listen 10.1.2.1
+
+# IP on which external connections are created.
+Bind 10.1.3.2
+
+# Allow only requests from this IP.
+Allow 127.0.0.1
+Allow 10.1.1.1
+
+# Allow only requests on these ports.
+ConnectPort 443
+ConnectPort 80
+
+# Allow these many parallel connections and underlying server connections.
+MaxClients 10000
+StartServers 10
+MinSpareServers 10
+
+# Deny requests that don't match the domain allowlist defined below.
+FilterDefaultDeny Yes
+
+# Domain allowlist file, generated in code.
+Filter "/tmp/rbe_allowlist.txt"
+
+# Port to listen on. To be added by rbe.py.
diff --git a/build/sandbox/rbe_test.py b/build/sandbox/rbe_test.py
new file mode 100644
index 0000000..51a7b79
--- /dev/null
+++ b/build/sandbox/rbe_test.py
@@ -0,0 +1,36 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test rbe."""
+
+import unittest
+from . import rbe
+
+class RBETest(unittest.TestCase):
+  """unittest for RBE."""
+
+  def testExtraNsjailArgs(self):
+    self.assertEqual(rbe.get_extra_nsjail_args()[0], '--disable_clone_newnet')
+
+  def testEnv(self):
+    env = rbe.prepare_env(env=["RBE_instance=test_instance", "RBE_service=test_service"])
+    self.assertIn('USE_RBE=true', env)
+    self.assertIn('RBE_instance=test_instance', env)
+    self.assertIn('RBE_JAVAC=true', env)
+    self.assertIn('RBE_D8=true', env)
+    self.assertIn('RBE_R8=true', env)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/sandbox/sample_config.xml b/build/sandbox/sample_config.xml
new file mode 100644
index 0000000..dbbd412
--- /dev/null
+++ b/build/sandbox/sample_config.xml
@@ -0,0 +1,35 @@
+<?xml version = "1.0" encoding = "UTF-8" ?>
+<!--
+Defines sample build configuration file.
+-->
+<config>
+  <target name="aosp_cf_x86_phone_default" android_target="aosp_cf_x86_phone" 
+      allow_readwrite_all="true">
+    <!-- Target elements can define elements and attributes that are inherited
+         by build_config child elements. -->
+    <goal name="droid"/>
+    <build_config>
+      <!-- build_config name will inherit the name aosp_cf_x86_phone_default
+           and append dist to the goal list. -->
+      <goal name="dist"/>
+    </build_config>
+    <build_config name="aosp_cf_x86_phone_no_dist" tags="skip">
+    </build_config>
+    <build_config name="aosp_cf_x86_phone_ro" allow_readwrite_all="false">
+      <!-- This build_config will override allow_readwrite_all attribute. -->
+    </build_config>
+    <build_config name="aosp_cf_x86_tv" android_target="aosp_cf_x86_tv">
+      <!-- This build will use aosp_cf_x86_tv as the lunch target. -->
+    </build_config>
+  </target>
+  <target name="aosp_car_arm64">
+    <!-- If android_target isn't provided target will use name as default
+         android_target. -->
+    <goal name="droid"/>
+    <goal name="dist"/>
+    <build_config>
+      <!-- build_config will inherit the name and android_target: 
+           aosp_car_arm64. -->
+    </build_config>
+  </target>
+</config>
diff --git a/fetcher/Android.bp b/fetcher/Android.bp
new file mode 100644
index 0000000..787d0b7
--- /dev/null
+++ b/fetcher/Android.bp
@@ -0,0 +1,49 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "tools_treble_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["tools_treble_license"],
+}
+
+python_defaults {
+  name: "fetcher-defaults",
+  version: {
+      py2: {
+          enabled: false,
+          embedded_launcher: false,
+      },
+      py3: {
+          enabled: true,
+          embedded_launcher: false,
+      },
+  },
+}
+
+
+python_library_host {
+  name: "fetcher-lib",
+  defaults: ["fetcher-defaults"],
+  srcs: [
+    "fetcher_lib.py"
+  ],
+  pkg_path: "treble/fetcher",
+  libs: [
+      "py-google-api-python-client",
+      "py-oauth2client",
+      "py-six",
+  ],
+}
+
+python_binary_host {
+    name: "fetcher",
+    main: "fetcher.py",
+    defaults: ["fetcher-defaults"],
+    srcs: [
+        "fetcher.py",
+    ],
+    libs: [
+        "fetcher-lib",
+    ],
+}
diff --git a/fetcher/fetcher.py b/fetcher/fetcher.py
new file mode 100644
index 0000000..b78d316
--- /dev/null
+++ b/fetcher/fetcher.py
@@ -0,0 +1,66 @@
+"""Fetches artifacts from Android Build."""
+import argparse
+import os
+
+from treble.fetcher import fetcher_lib
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      '--json_keyfile',
+      help='JSON keyfile containing credentials. '
+      '(Default: Use default credential file)')
+  parser.add_argument(
+      '--target', required=True, help='The target name to download from.')
+  parser.add_argument(
+      '--artifact',
+      action='append',
+      default=[],
+      help='The name of the artifact to download. '
+      'Can be specified multiple times.')
+  parser.add_argument(
+      '--regex',
+      action='append',
+      default=[],
+      help='A regex pattern to compare to the names of the artifact to '
+      'download. Can be specified multiple times.')
+
+  parser.add_argument(
+      '--out_dir',
+      default='out/artifacts/',
+      help='Path to store fetched artifact to.')
+
+  group = parser.add_mutually_exclusive_group(required=True)
+  group.add_argument(
+      '--branch', help='Download from the latest build of this branch.')
+  group.add_argument('--build_id', help='Download from the specified build.')
+
+  args = parser.parse_args()
+  client = fetcher_lib.create_client_from_json_keyfile(
+      json_keyfile_name=args.json_keyfile)
+
+  build_id = fetcher_lib.get_latest_build_id(
+      client=client, branch=args.branch,
+      target=args.target) if args.branch else args.build_id
+
+  for artifact in args.artifact:
+    fetcher_lib.fetch_artifact(
+        client=client,
+        build_id=build_id,
+        target=args.target,
+        resource_id=artifact,
+        dest=os.path.join(args.out_dir, artifact))
+
+  for re in args.regex:
+    fetcher_lib.fetch_artifacts(
+        client=client,
+        build_id=build_id,
+        target=args.target,
+        pattern=re,
+        out_dir=args.out_dir)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/fetcher/fetcher_lib.py b/fetcher/fetcher_lib.py
new file mode 100644
index 0000000..0ec0173
--- /dev/null
+++ b/fetcher/fetcher_lib.py
@@ -0,0 +1,284 @@
+"""Provides helper functions for fetching artifacts."""
+
+import io
+import os
+import re
+import sys
+import sysconfig
+import time
+
+# This is a workaround to put '/usr/lib/python3.X' ahead of googleapiclient
+# Using embedded_launcher won't work since py3-cmd doesn't contain _ssl module.
+if sys.version_info.major == 3:
+  sys.path.insert(0, os.path.dirname(sysconfig.get_paths()['purelib']))
+
+# pylint: disable=import-error,g-bad-import-order,g-import-not-at-top
+import apiclient
+from googleapiclient.discovery import build
+from six.moves import http_client
+
+import httplib2
+from oauth2client.service_account import ServiceAccountCredentials
+
+_SCOPE_URL = 'https://www.googleapis.com/auth/androidbuild.internal'
+_DEF_JSON_KEYFILE = '.config/gcloud/application_default_credentials.json'
+
+
+# 20 MB default chunk size -- used in Buildbot
+_DEFAULT_CHUNK_SIZE = 20 * 1024 * 1024
+
+# HTTP errors -- used in Buildbot
+_DEFAULT_MASKED_ERRORS = [404]
+_DEFAULT_RETRIED_ERRORS = [503]
+_DEFAULT_RETRIES = 10
+
+
+def _create_http_from_p12(robot_credentials_file, robot_username):
+  """Creates a credentialed HTTP object for requests.
+
+  Args:
+    robot_credentials_file: The path to the robot credentials file.
+    robot_username: A string containing the username of the robot account.
+
+  Returns:
+    An authorized httplib2.Http object.
+  """
+  try:
+    credentials = ServiceAccountCredentials.from_p12_keyfile(
+        service_account_email=robot_username,
+        filename=robot_credentials_file,
+        scopes=_SCOPE_URL)
+  except AttributeError:
+    raise ValueError('Machine lacks openssl or pycrypto support')
+  http = httplib2.Http()
+  return credentials.authorize(http)
+
+
+def _simple_execute(http_request,
+                    masked_errors=None,
+                    retried_errors=None,
+                    retry_delay_seconds=5,
+                    max_tries=_DEFAULT_RETRIES):
+  """Execute http request and return None on specified errors.
+
+  Args:
+    http_request: the apiclient provided http request
+    masked_errors: list of errors to return None on
+    retried_errors: list of errors to retry the request on
+    retry_delay_seconds: how many seconds to sleep before retrying
+    max_tries: maximum number of attempts to make the request
+
+  Returns:
+    The result on success or None on masked errors.
+  """
+  if not masked_errors:
+    masked_errors = _DEFAULT_MASKED_ERRORS
+  if not retried_errors:
+    retried_errors = _DEFAULT_RETRIED_ERRORS
+
+  last_error = None
+  for _ in range(max_tries):
+    try:
+      return http_request.execute()
+    except http_client.errors.HttpError as e:
+      last_error = e
+      if e.resp.status in masked_errors:
+        return None
+      elif e.resp.status in retried_errors:
+        time.sleep(retry_delay_seconds)
+      else:
+        # Not a masked or retried status -- re-raise immediately.
+        raise e
+
+  # We've exhausted max_tries attempts; raise the last error seen.
+  raise last_error  # pylint: disable=raising-bad-type
+
+
+def create_client(http):
+  """Creates an Android build api client from an authorized http object.
+
+  Args:
+     http: An authorized httplib2.Http object.
+
+  Returns:
+    An authorized android build api client.
+  """
+  return build(serviceName='androidbuildinternal', version='v2beta1', http=http)
+
+
+def create_client_from_json_keyfile(json_keyfile_name=None):
+  """Creates an Android build api client from a json keyfile.
+
+  Args:
+    json_keyfile_name: The location of the keyfile, if None is provided use
+                       default location.
+
+  Returns:
+    An authorized android build api client.
+  """
+  if not json_keyfile_name:
+    json_keyfile_name = os.path.join(os.getenv('HOME'), _DEF_JSON_KEYFILE)
+
+  credentials = ServiceAccountCredentials.from_json_keyfile_name(
+      filename=json_keyfile_name, scopes=_SCOPE_URL)
+  http = httplib2.Http()
+  credentials.authorize(http)
+  return create_client(http)
+
+
+def create_client_from_p12(robot_credentials_file, robot_username):
+  """Creates an Android build api client from a config file.
+
+  Args:
+    robot_credentials_file: The path to the robot credentials file.
+    robot_username: A string containing the username of the robot account.
+
+  Returns:
+    An authorized android build api client.
+  """
+  http = _create_http_from_p12(robot_credentials_file, robot_username)
+  return create_client(http)
+
+
+def fetch_artifact(client, build_id, target, resource_id, dest):
+  """Fetches an artifact.
+
+  Args:
+    client: An authorized android build api client.
+    build_id: AB build id
+    target: the target name to download from
+    resource_id: the resource id of the artifact
+    dest: path to store the artifact
+  """
+  out_dir = os.path.dirname(dest)
+  if not os.path.exists(out_dir):
+    os.makedirs(out_dir)
+
+  dl_req = client.buildartifact().get_media(
+      buildId=build_id,
+      target=target,
+      attemptId='latest',
+      resourceId=resource_id)
+
+  print('Fetching %s to %s...' % (resource_id, dest))
+  with io.FileIO(dest, mode='wb') as fh:
+    downloader = apiclient.http.MediaIoBaseDownload(
+        fh, dl_req, chunksize=_DEFAULT_CHUNK_SIZE)
+    done = False
+    while not done:
+      status, done = downloader.next_chunk(num_retries=_DEFAULT_RETRIES)
+      print('Fetching...' + str(status.progress() * 100))
+
+  print('Done Fetching %s to %s' % (resource_id, dest))
+
+
+def get_build_list(client, **kwargs):
+  """Get a list of builds from the android build api that matches parameters.
+
+  Args:
+    client: An authorized android build api client.
+    **kwargs: keyworded arguments to pass to build api.
+
+  Returns:
+    Response from build api.
+  """
+  build_request = client.build().list(**kwargs)
+
+  return _simple_execute(build_request)
+
+
+def list_artifacts(client, regex, **kwargs):
+  """List artifacts from the android build api that matches parameters.
+
+  Args:
+    client: An authorized android build api client.
+    regex: Regular expression pattern to match artifact name.
+    **kwargs: keyworded arguments to pass to buildartifact.list api.
+
+  Returns:
+    List of matching artifact names.
+  """
+  matching_artifacts = []
+  kwargs.setdefault('attemptId', 'latest')
+  regex = re.compile(regex)
+  req = client.buildartifact().list(**kwargs)
+  while req:
+    result = _simple_execute(req)
+    if result and 'artifacts' in result:
+      for a in result['artifacts']:
+        if regex.match(a['name']):
+          matching_artifacts.append(a['name'])
+    req = client.buildartifact().list_next(req, result)
+  return matching_artifacts
+
+
+def fetch_artifacts(client, out_dir, target, pattern, build_id):
+  """Fetches target files artifacts matching patterns.
+
+  Args:
+    client: An authorized instance of an android build api client for making
+      requests.
+    out_dir: The directory to store the fetched artifacts to.
+    target: The target name to download from.
+    pattern: A regex pattern to match to artifacts filename.
+    build_id: The Android Build id.
+  """
+  if not os.path.exists(out_dir):
+    os.makedirs(out_dir)
+
+  # Build a list of needed artifacts
+  artifacts = list_artifacts(
+      client=client,
+      regex=pattern,
+      buildId=build_id,
+      target=target)
+
+  for artifact in artifacts:
+    fetch_artifact(
+        client=client,
+        build_id=build_id,
+        target=target,
+        resource_id=artifact,
+        dest=os.path.join(out_dir, artifact))
+
+
+def get_latest_build_id(client, branch, target):
+  """Get the latest build id.
+
+  Args:
+    client: An authorized instance of an android build api client for making
+      requests.
+    branch: The branch to download from
+    target: The target name to download from.
+  Returns:
+    The build id.
+  """
+  build_response = get_build_list(
+      client=client,
+      branch=branch,
+      target=target,
+      maxResults=1,
+      successful=True,
+      buildType='submitted')
+
+  if not build_response:
+    raise ValueError('Unable to determine latest build ID!')
+
+  return build_response['builds'][0]['buildId']
+
+
+def fetch_latest_artifacts(client, out_dir, target, pattern, branch):
+  """Fetches target files artifacts matching patterns from the latest build.
+
+  Args:
+    client: An authorized instance of an android build api client for making
+      requests.
+    out_dir: The directory to store the fetched artifacts to.
+    target: The target name to download from.
+    pattern: A regex pattern to match to artifacts filename
+    branch: The branch to download from
+  """
+  build_id = get_latest_build_id(
+      client=client, branch=branch, target=target)
+
+  fetch_artifacts(client, out_dir, target, pattern, build_id)
diff --git a/hacksaw/README.md b/hacksaw/README.md
new file mode 100644
index 0000000..58bbb60
--- /dev/null
+++ b/hacksaw/README.md
@@ -0,0 +1,180 @@
+# hacksaw
+
+**HACK** in a **S**peedy **A**ccess **W**orkspace
+
+## What is Hacksaw?
+
+If you have a large multi-gigabyte codebase spread out through multiple git projects it can take a long time to branch off a clean workspace. Hacksaw is a tool that
+lets you split off a clean workspace in seconds. It does so by only copying git projects that you
+explicitly select to be edited. All other projects are read-only bind mounts. This lets you build without cloning the full codebase to a new location!
+
+## How much faster is it, really?
+
+Let's look at some performance numbers for creating a hacksaw workspace using as a codebase the AOSP master branch as of 2020-8-4. The machine used was a c2-standard-60 Google Cloud Platform VM with 60 vCPUs and 240 GiB of RAM. Each action was performed at least 10 times then averaged out.
+
+* Create a new Hacksaw workspace
+  + Time: 0.4 sec
+  + Disk usage: 7.9 MiB
+
+* Remove a Hacksaw workspace with no edits or build artifacts.
+  + Time: 0.6 sec
+
+* Create a new Hacksaw workspace and edit build/make project.
+  + Time: 0.6 sec
+  + Disk usage: 18 MiB
+
+* Create a new Hacksaw workspace and edit frameworks/base project.
+  + Time: 7.5 sec
+  + Disk usage: 1.3 GiB
+
+As you can see, the time it takes to set up a new hacksaw workspace is proportional to
+the git projects checked out for editing. Contrast that with how long it takes
+to create a workspace using a full repo sync with a local
+mirror.
+
+* Create a new full repo workspace [using a fresh local mirror](https://source.android.com/setup/build/downloading#using-a-local-mirror)
+  + Time: 12 min 32 sec
+  + Disk usage: 88 GiB
+
+* Remove a full repo workspace with no build artifacts
+  + Time: 28 seconds
+
+## Can you give me an example?
+
+```
+$ mkdir ~/aosp
+$ cd ~/aosp
+$ repo init -u https://android.googlesource.com/platform/manifest
+...
+$ repo sync --quiet --current-branch --no-tags --no-clone-bundle --jobs=$(nproc)
+...
+$ hacksaw codebase add aosp ~/aosp
+Added codebase aosp
+$ hacksaw codebase default aosp
+Default codebase set to aosp
+$ hacksaw workspace new big-feature
+Composing.................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+...........................................
+Workspace composed
+Created big-feature at ~/hacksaw/big-feature
+$ hacksaw edit ~/hacksaw/big-feature/tools/treble
+Created branch big-feature on project ~/hacksaw/big-feature/tools/treble
+$ hacksaw workspace new quick-fix
+Composing.................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+..........................................................................
+...........................................
+Workspace composed
+Created quick-fix at ~/hacksaw/quick-fix
+$ hacksaw edit ~/hacksaw/quick-fix/tools/treble
+Created branch quick-fix on project ~/hacksaw/quick-fix/tools/treble
+```
+
+## How do I install it?
+
+Building hacksaw requires [golang to be installed](https://golang.org/doc/install).
+To install the hacksaw client run the following:
+
+```
+go get android.googlesource.com/platform/tools/treble.git/hacksaw/cmd/hacksaw
+```
+
+This will install hacksaw to ~/go/bin/hacksaw. You may choose to copy that
+to a location in your path. For example:
+
+```
+sudo cp ~/go/bin/hacksaw /usr/local/bin
+sudo chmod 755 /usr/local/bin/hacksaw
+```
+
+## How do I make sure that creating a hacksaw workspace is fast?
+
+Hacksaw creates bind mounts for all git projects in a codebase. It then
+copies **everything** else. Make sure you remove all build artifacts from a
+codebase before creating a workspace, otherwise it may spend a long time copying
+them.
+
+## How do I run it with sudo?
+
+Commands that mount and unmount will require sudo. That includes commands like
+
+* `hacksaw workspace new`
+* `hacksaw edit`
+* `hacksaw workspace remove`
+
+Other commands like `hacksaw workspace list` or `hacksaw codebase add` do not
+mount or unmount so do not require sudo.
+
+If you would like to avoid using sudo you may install hacksawd as explained below.
+
+## How do I run it without sudo?
+
+Hacksawd is a privileged system daemon whose only job is to manage bind mounts.
+The provided install script will install to your system
+
+```
+go get android.googlesource.com/platform/tools/treble.git/hacksaw/cmd/hacksawd
+sudo cp ~/go/bin/hacksawd /usr/local/bin
+sudo chmod 755 /usr/local/bin/hacksawd
+sudo ~/go/src/android.googlesource.com/platform/tools/treble.git/hacksaw/scripts/install-service.sh
+```
+
+The installation script creates a new "hacksaw" group and adds you to it. You
+will need to log out and log back in for the group changes to take effect. After that you should be able to run any hacksaw command without sudo.
+
+If you wish to uninstall the service then run:
+
+```
+sudo ~/go/src/android.googlesource.com/platform/tools/treble.git/hacksaw/scripts/uninstall-service.sh
+sudo rm /usr/local/bin/hacksawd
+```
+## How do I sync?
+
+You sync your codebases using `repo sync`. All updates will be propagated to workspaces.
+Except for projects that you are currently editing. Those will require you to `git pull`
+manually in the workspace project.
+
+## How does hacksaw work?
+
+Hacksaw uses read-only bind mounts to create project references from
+a workspace to a codebase. When you mark a project for editing then
+its read-only bind mount gets replaced by a writable Git worktree.
+
+![Workspace diagram](images/workspace-diagram.png)
+
+
+## What are the known issues?
+
+* Some repo commands don't work yet. Namely: `repo start` and `repo upload`.
+  So at the moment you can only upload to Gerrit [using git
+  push](https://gerrit-review.googlesource.com/Documentation/user-upload.html#_git_push).
+* Failing to create a workspace is not rolled back.
+* Editing nested projects is not supported yet. So if you have a git project
+  that contains other git projects you will get some unexpected behaviour.
+* Git submodules are not supported yet, but the tool is designed with
+  future git submodule support in mind.
+* Syncing a codebase does update the existing projects in all attached
+  workspaces but it does not remove or add new projects. Perhaps there
+  should be a new "workspace sync" command for that?
+
+## Where can I get more help?
+
+You can ask hacksaw-users@googlegroups.com by [joining the group](https://groups.google.com/forum/#!forum/hacksaw-users).
\ No newline at end of file
diff --git a/hacksaw/bind/bind.go b/hacksaw/bind/bind.go
new file mode 100644
index 0000000..ac94b41
--- /dev/null
+++ b/hacksaw/bind/bind.go
@@ -0,0 +1,22 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bind
+
+// PathBinder is the interface for creating, removing and listing bind
+// mounts. Implementations in this package either perform the operations
+// locally or forward them to a daemon over RPC.
+type PathBinder interface {
+	// BindReadOnly bind-mounts source onto destination read-only.
+	BindReadOnly(source string, destination string) error
+	// BindReadWrite bind-mounts source onto destination read-write.
+	BindReadWrite(source string, destination string) error
+	// Unbind removes the bind mount at destination.
+	Unbind(destination string) error
+	// List returns the destinations of known bind mounts.
+	List() ([]string, error)
+}
diff --git a/hacksaw/bind/local.go b/hacksaw/bind/local.go
new file mode 100644
index 0000000..5679590
--- /dev/null
+++ b/hacksaw/bind/local.go
@@ -0,0 +1,99 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bind
+
+import (
+	"fmt"
+	"path/filepath"
+	"syscall"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/mount"
+)
+
+//localBinder executes PathBinder calls locally
+type localBinder struct {
+	mounter mount.Mounter // performs the actual mount/unmount calls
+}
+
+// NewLocalPathBinder returns a PathBinder backed by the real system
+// mounter.
+func NewLocalPathBinder() PathBinder {
+	var p localBinder
+	p.mounter = mount.NewSystemMounter()
+	return &p
+}
+
+// NewFakePathBinder returns a PathBinder backed by a fake mounter,
+// for use in tests.
+func NewFakePathBinder() PathBinder {
+	var p localBinder
+	p.mounter = mount.NewFakeMounter()
+	return &p
+}
+
+// checkValidPath returns nil only if some ancestor directory of inPath
+// is named "hacksaw", i.e. the path lies inside a hacksaw workspace
+// tree; otherwise it returns an error.
+func (p localBinder) checkValidPath(inPath string) error {
+	for dir := filepath.Dir(inPath); dir != "." && dir != "/"; dir = filepath.Dir(dir) {
+		// Only allow mounts in hacksaw path
+		if filepath.Base(dir) == "hacksaw" {
+			return nil
+		}
+	}
+	return fmt.Errorf("Not allowed to bind mount path %s because it's outside a hacksaw workspace", inPath)
+}
+
+// BindReadOnly bind-mounts source onto destination and makes the mount
+// read-only. Symlinks in both paths are resolved first.
+func (p localBinder) BindReadOnly(source string, destination string) error {
+	// TODO: check valid path considering sym links
+	source, err := filepath.EvalSymlinks(source)
+	if err != nil {
+		return err
+	}
+	destination, err = filepath.EvalSymlinks(destination)
+	if err != nil {
+		return err
+	}
+	// Two-step: create the bind mount first, then remount it
+	// read-only — per mount(2), MS_RDONLY is not honored on the
+	// initial MS_BIND call.
+	err = p.mounter.Mount(source, destination,
+		"bind", syscall.MS_BIND, "")
+	if err != nil {
+		return err
+	}
+	err = p.mounter.Mount(source, destination,
+		"bind", syscall.MS_REMOUNT|syscall.MS_BIND|syscall.MS_RDONLY, "")
+	return err
+}
+
+// BindReadWrite bind-mounts source onto destination read-write.
+// Symlinks in both paths are resolved first.
+func (p localBinder) BindReadWrite(source string, destination string) error {
+	// TODO: check valid path considering sym links
+	source, err := filepath.EvalSymlinks(source)
+	if err != nil {
+		return err
+	}
+	destination, err = filepath.EvalSymlinks(destination)
+	if err != nil {
+		return err
+	}
+	err = p.mounter.Mount(source, destination,
+		"bind", syscall.MS_BIND, "")
+	return err
+}
+
+// Unbind lazily detaches (MNT_DETACH) the mount at destination, after
+// resolving symlinks in the path.
+func (p localBinder) Unbind(destination string) error {
+	// TODO: check valid path considering sym links
+	destination, err := filepath.EvalSymlinks(destination)
+	if err != nil {
+		return err
+	}
+	err = p.mounter.Unmount(destination, syscall.MNT_DETACH)
+	return err
+}
+
+// List returns the mounter's view of current bind mount destinations.
+func (p localBinder) List() ([]string, error) {
+	return p.mounter.List()
+}
diff --git a/hacksaw/bind/remoteclient.go b/hacksaw/bind/remoteclient.go
new file mode 100644
index 0000000..122c261
--- /dev/null
+++ b/hacksaw/bind/remoteclient.go
@@ -0,0 +1,103 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bind
+
+import (
+	"errors"
+	"net/rpc"
+)
+
+//RemoteBinderClient sends client requests to RPC
+type RemoteBinderClient struct {
+	rpcClient  *rpc.Client // dialed lazily on first use
+	SocketPath string      // unix socket the RPC server listens on
+}
+
+// NewRemoteBindClient returns a PathBinder that forwards every call
+// over RPC on the given unix socket.
+func NewRemoteBindClient(socketPath string) PathBinder {
+	var r RemoteBinderClient
+	r.SocketPath = socketPath
+	return &r
+}
+
+// initRpcClient dials the HTTP-RPC server on SocketPath if not already
+// connected; it is called at the start of every request.
+func (r *RemoteBinderClient) initRpcClient() error {
+	if r.rpcClient != nil {
+		return nil
+	}
+	var err error
+	r.rpcClient, err = rpc.DialHTTP("unix", r.SocketPath)
+	return err
+}
+
+// BindReadOnly asks the server to create a read-only bind mount.
+// Server-side failures arrive as a string in reply.Err (error values
+// do not survive net/rpc) and are converted back to an error here.
+func (r *RemoteBinderClient) BindReadOnly(source string, destination string) error {
+	args := BindReadOnlyArgs{source, destination}
+	var reply BindReadOnlyReply
+	if err := r.initRpcClient(); err != nil {
+		return err
+	}
+	if err := r.rpcClient.Call("Server.BindReadOnly", &args, &reply); err != nil {
+		return err
+	}
+	if reply.Err != "" {
+		return errors.New(reply.Err)
+	}
+	return nil
+}
+
+// BindReadWrite asks the server to create a read-write bind mount.
+// Server-side failures are reported through reply.Err as a string.
+func (r *RemoteBinderClient) BindReadWrite(source string, destination string) error {
+	args := BindReadWriteArgs{source, destination}
+	var reply BindReadWriteReply
+	if err := r.initRpcClient(); err != nil {
+		return err
+	}
+	if err := r.rpcClient.Call("Server.BindReadWrite", &args, &reply); err != nil {
+		return err
+	}
+	if reply.Err != "" {
+		return errors.New(reply.Err)
+	}
+	return nil
+}
+
+// Unbind asks the server to remove the bind mount at destination.
+// Server-side failures are reported through reply.Err as a string.
+func (r *RemoteBinderClient) Unbind(destination string) error {
+	args := UnbindArgs{destination}
+	var reply UnbindReply
+	if err := r.initRpcClient(); err != nil {
+		return err
+	}
+	if err := r.rpcClient.Call("Server.Unbind", &args, &reply); err != nil {
+		return err
+	}
+	if reply.Err != "" {
+		return errors.New(reply.Err)
+	}
+	return nil
+}
+
+// List asks the server for the current bind mount destinations.
+// Server-side failures are reported through reply.Err as a string.
+func (r *RemoteBinderClient) List() ([]string, error) {
+	var args ListArgs
+	var reply ListReply
+	if err := r.initRpcClient(); err != nil {
+		return nil, err
+	}
+	if err := r.rpcClient.Call("Server.List", &args, &reply); err != nil {
+		return nil, err
+	}
+
+	if reply.Err != "" {
+		return nil, errors.New(reply.Err)
+	}
+
+	return reply.BindList, nil
+}
diff --git a/hacksaw/bind/remoteserver.go b/hacksaw/bind/remoteserver.go
new file mode 100644
index 0000000..c059635
--- /dev/null
+++ b/hacksaw/bind/remoteserver.go
@@ -0,0 +1,102 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bind
+
+//Server receives RPC requests
+type Server struct {
+	priv PathBinder // underlying binder that performs the real work
+}
+
+// NewServer wraps binder in an RPC-callable Server.
+func NewServer(binder PathBinder) *Server {
+	var s Server
+	s.priv = binder
+	return &s
+}
+
+// BindReadOnlyArgs carries the request for Server.BindReadOnly.
+type BindReadOnlyArgs struct {
+	Source      string
+	Destination string
+}
+
+type BindReadOnlyReply struct {
+	// Errors types cannot be passed as-is in RPC so they
+	// must be converted to plain strings.
+	// Details at https://github.com/golang/go/issues/23340
+	Err string
+}
+
+// BindReadOnly forwards the request to the wrapped binder. It always
+// returns nil; binder failures are reported through reply.Err.
+func (s Server) BindReadOnly(args *BindReadOnlyArgs, reply *BindReadOnlyReply) error {
+	if err := s.priv.BindReadOnly(args.Source, args.Destination); err != nil {
+		reply.Err = err.Error()
+	}
+	return nil
+}
+
+// BindReadWriteArgs carries the request for Server.BindReadWrite.
+type BindReadWriteArgs struct {
+	Source      string
+	Destination string
+}
+
+type BindReadWriteReply struct {
+	// Errors types cannot be passed as-is in RPC so they
+	// must be converted to plain strings.
+	// Details at https://github.com/golang/go/issues/23340
+	Err string
+}
+
+// BindReadWrite forwards the request to the wrapped binder. It always
+// returns nil; binder failures are reported through reply.Err.
+func (s Server) BindReadWrite(args *BindReadWriteArgs, reply *BindReadWriteReply) error {
+	if err := s.priv.BindReadWrite(args.Source, args.Destination); err != nil {
+		reply.Err = err.Error()
+	}
+	return nil
+}
+
+// UnbindArgs carries the request for Server.Unbind.
+type UnbindArgs struct {
+	Destination string
+}
+
+type UnbindReply struct {
+	// Errors types cannot be passed as-is in RPC so they
+	// must be converted to plain strings.
+	// Details at https://github.com/golang/go/issues/23340
+	Err string
+}
+
+// Unbind forwards the request to the wrapped binder. It always
+// returns nil; binder failures are reported through reply.Err.
+func (s Server) Unbind(args *UnbindArgs, reply *UnbindReply) error {
+	if err := s.priv.Unbind(args.Destination); err != nil {
+		reply.Err = err.Error()
+	}
+	return nil
+}
+
+// ListArgs is the (empty) request for Server.List.
+type ListArgs struct {
+}
+
+type ListReply struct {
+	BindList []string
+	// Errors types cannot be passed as-is in RPC so they
+	// must be converted to plain strings.
+	// Details at https://github.com/golang/go/issues/23340
+	Err string
+}
+
+// List queries the wrapped binder for current bind mounts. It always
+// returns nil; binder failures are reported through reply.Err, and
+// BindList is set to whatever the binder returned.
+func (s Server) List(args *ListArgs, reply *ListReply) error {
+	bindList, err := s.priv.List()
+	if err != nil {
+		reply.Err = err.Error()
+	}
+	reply.BindList = bindList
+	return nil
+}
diff --git a/hacksaw/bind/remoteserver_test.go b/hacksaw/bind/remoteserver_test.go
new file mode 100644
index 0000000..6f1eabd
--- /dev/null
+++ b/hacksaw/bind/remoteserver_test.go
@@ -0,0 +1,111 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bind
+
+import (
+	"io/ioutil"
+	"os"
+	"path"
+	"reflect"
+	"testing"
+)
+
+// TestServerBind walks the whole Server RPC surface against the fake
+// binder: bind read-only, bind read-write, list, unbind, list again.
+func TestServerBind(t *testing.T) {
+	mountTempDir, err := ioutil.TempDir("", "mount")
+	if err != nil {
+		t.Error(err)
+	}
+	fakeBinder := NewFakePathBinder()
+	server := NewServer(fakeBinder)
+	// Read-only bind: destination mirrors a workspace-style path
+	// containing a "hacksaw" component.
+	roSourceDir := path.Join(mountTempDir, "path/to/readonly/source")
+	if err = os.MkdirAll(roSourceDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	roDestDir := path.Join(mountTempDir, "path/to/hacksaw/readonly/destination")
+	if err = os.MkdirAll(roDestDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	bindROArgs := BindReadOnlyArgs{
+		Source:      roSourceDir,
+		Destination: roDestDir,
+	}
+	var bindROReply BindReadOnlyReply
+	if err := server.BindReadOnly(&bindROArgs, &bindROReply); err != nil {
+		t.Error(err)
+	}
+	if bindROReply.Err != "" {
+		t.Error(bindROReply.Err)
+	}
+	// Read-write bind.
+	rwSourceDir := path.Join(mountTempDir, "path/to/readwrite/source")
+	if err = os.MkdirAll(rwSourceDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	rwDestDir := path.Join(mountTempDir, "path/to/hacksaw/readwrite/destination")
+	if err = os.MkdirAll(rwDestDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	bindRWArgs := BindReadWriteArgs{
+		Source:      rwSourceDir,
+		Destination: rwDestDir,
+	}
+	var bindRWReply BindReadWriteReply
+	if err := server.BindReadWrite(&bindRWArgs, &bindRWReply); err != nil {
+		t.Error(err)
+	}
+	if bindRWReply.Err != "" {
+		t.Error(bindRWReply.Err)
+	}
+	// List must report both binds, in creation order.
+	var listArgs ListArgs
+	var listReply ListReply
+	err = server.List(&listArgs, &listReply)
+	if err != nil {
+		t.Error(err)
+	}
+	if listReply.Err != "" {
+		t.Error(listReply.Err)
+	}
+	expectedList := []string{
+		roDestDir,
+		rwDestDir,
+	}
+	if !reflect.DeepEqual(listReply.BindList, expectedList) {
+		t.Errorf("Bind list %v is different than expected bind %v",
+			listReply.BindList, expectedList)
+	}
+	// Unbind the read-write mount and confirm only the read-only
+	// one remains.
+	unbindArgs := UnbindArgs{
+		Destination: rwDestDir,
+	}
+	var unbindReply UnbindReply
+	if err := server.Unbind(&unbindArgs, &unbindReply); err != nil {
+		t.Error(err)
+	}
+	if unbindReply.Err != "" {
+		t.Error(unbindReply.Err)
+	}
+	err = server.List(&listArgs, &listReply)
+	if err != nil {
+		t.Error(err)
+	}
+	if listReply.Err != "" {
+		t.Error(listReply.Err)
+	}
+	expectedList = []string{
+		roDestDir,
+	}
+	if !reflect.DeepEqual(listReply.BindList, expectedList) {
+		t.Errorf("Bind list %v is different than expected bind %v",
+			listReply.BindList, expectedList)
+	}
+}
diff --git a/hacksaw/client/client.go b/hacksaw/client/client.go
new file mode 100644
index 0000000..4db9f2c
--- /dev/null
+++ b/hacksaw/client/client.go
@@ -0,0 +1,62 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package client implements the hacksaw cli client
+package client
+
+import (
+	"io/ioutil"
+	"os"
+	"path"
+	"reflect"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/config"
+)
+
+// ensureConfigFileExists creates cfgPath (and its parent directories)
+// containing an empty JSON object if the file does not exist yet.
+func ensureConfigFileExists(cfgPath string) error {
+	_, err := os.Stat(cfgPath)
+	if os.IsNotExist(err) {
+		//continue to init config file
+	} else {
+		// err is nil when the file already exists; either way
+		// there is nothing to initialize, so return it as-is.
+		return err
+	}
+
+	//init config file
+	if err = os.MkdirAll(path.Dir(cfgPath), os.ModePerm); err != nil {
+		return err
+	}
+	return ioutil.WriteFile(cfgPath, []byte("{}"), os.ModePerm)
+}
+
+// HandleCommand loads the workspace config (creating it if needed),
+// dispatches args to Command.Handle, and writes the config back only
+// when the command actually changed it.
+func HandleCommand(workspaceTopDir string, pathBinder bind.PathBinder, args []string) error {
+	cfgPath := path.Join(workspaceTopDir, "config.json")
+	if err := ensureConfigFileExists(cfgPath); err != nil {
+		return err
+	}
+	cfg := config.GetConfig()
+	if err := cfg.ReadConfigFromFile(cfgPath); err != nil {
+		return err
+	}
+	//Save a copy of the config to detect changes
+	savedCfg := cfg.Copy()
+	cmd := NewCommand(pathBinder, workspaceTopDir)
+	if err := cmd.Handle(args); err != nil {
+		return err
+	}
+	// Skip the write when the command left the config untouched.
+	if reflect.DeepEqual(savedCfg, cfg) {
+		return nil
+	}
+	return cfg.WriteConfigToFile(cfgPath)
+}
diff --git a/hacksaw/client/client_test.go b/hacksaw/client/client_test.go
new file mode 100644
index 0000000..90437c2
--- /dev/null
+++ b/hacksaw/client/client_test.go
@@ -0,0 +1,36 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package client
+
+import (
+	"io/ioutil"
+	"path"
+	"testing"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+)
+
+// TestClientHelp runs the full client entry point ("hacksaw help")
+// with a fake path binder and a temp workspace directory.
+func TestClientHelp(t *testing.T) {
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	pathBinder := bind.NewFakePathBinder()
+	args := []string{"hacksaw", "help"}
+	if err = HandleCommand(wsTopDir, pathBinder, args); err != nil {
+		t.Error(err)
+	}
+}
diff --git a/hacksaw/client/command.go b/hacksaw/client/command.go
new file mode 100644
index 0000000..b06c2d4
--- /dev/null
+++ b/hacksaw/client/command.go
@@ -0,0 +1,247 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package client
+
+import (
+	"fmt"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/codebase"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/workspace"
+)
+
+// usage is printed by the "help" command and when no command is given.
+const usage = `Usage: hacksaw <command> <options>
+
+Commands:
+	help: show this message
+	codebase add <name> <path>: Add codebase to hacksaw's list.
+	codebase list: List all codebases that hacksaw knows about.
+	codebase default <name>: Change the default codebase.
+	codebase remove <name>: Remove a codebase from hacksaw's list.
+	workspace new <workspace_name> <codebase_name>: Create a new workspace
+	  from a codebase. The codebase is optional if a default codebase
+	  has already been set.
+	workspace recreate <name>: Recreate an existing workspace.
+	  This will recreate any read-only bind mounts which may be necessary
+	  when a machine is rebooted.
+	workspace list: List all known workspaces.
+	workspace remove <name>: Remove a workspace.
+	edit <path>: Make a workspace path editable by checking out the parent git project.`
+
+// Command dispatches parsed CLI arguments to codebase and workspace
+// operations.
+type Command struct {
+	workspace workspace.Workspace
+}
+
+// NewCommand builds a Command whose workspace operations use the given
+// path binder and workspace top directory.
+func NewCommand(bm bind.PathBinder, wtd string) Command {
+	return Command{workspace.New(bm, wtd)}
+}
+
+// addCodebase handles "hacksaw codebase add <name> <path>":
+// args[3] is the codebase name and args[4] its path.
+func (c Command) addCodebase(args []string) error {
+	if len(args) < 5 {
+		return fmt.Errorf("Codebase name and path are required\n"+
+			"Usage: %s %s %s <codebase_name> <path>",
+			args[0], args[1], args[2])
+	}
+	name := args[3]
+	path := args[4]
+	_, err := codebase.Add(name, path)
+	if err != nil {
+		return err
+	}
+	fmt.Println("Added codebase", name)
+	return err
+}
+
+// defaultCodebase handles "hacksaw codebase default <name>":
+// args[3] is the codebase to make the default.
+func (c Command) defaultCodebase(args []string) error {
+	if len(args) < 4 {
+		return fmt.Errorf("Codebase name is required\n"+
+			"Usage: %s %s %s <codebase_name>",
+			args[0], args[1], args[2])
+	}
+	name := args[3]
+	if err := codebase.SetDefault(name); err != nil {
+		return err
+	}
+	fmt.Println("Default codebase set to", name)
+	return nil
+}
+
+// listCodebases prints the default codebase (or "None") followed by
+// every known codebase with its path.
+func (c Command) listCodebases() {
+	def := codebase.Default()
+	if def == "" {
+		def = "None"
+	}
+	fmt.Println("Default codebase:")
+	fmt.Println("\t", def)
+
+	list := codebase.List()
+	fmt.Println("Codebases:")
+	for name, path := range list {
+		fmt.Println("\t", name, path)
+	}
+}
+
+// removeCodebase handles "hacksaw codebase remove <name>":
+// args[3] is the codebase to remove from hacksaw's list.
+func (c Command) removeCodebase(args []string) error {
+	if len(args) < 4 {
+		return fmt.Errorf("Codebase name required\n"+
+			"Usage: %s %s %s <codebase>",
+			args[0], args[1], args[2])
+	}
+	name := args[3]
+	_, err := codebase.Remove(name)
+	if err != nil {
+		return err
+	}
+	fmt.Println("Removed codebase", name)
+	return nil
+}
+
+// createWorkspace handles "hacksaw workspace new <name> [<codebase>]".
+// With four args the default codebase is used (an error if none is
+// set); with five args the codebase is taken from args[4].
+func (c Command) createWorkspace(args []string) error {
+	var codebaseName string
+	defaultCodebase := codebase.Default()
+	switch len(args) {
+	case 4:
+		if defaultCodebase == "" {
+			return fmt.Errorf("Codebase name is required\n"+
+				"Usage: %s %s %s <name> <codebase>",
+				args[0], args[1], args[2])
+		} else {
+			codebaseName = defaultCodebase
+		}
+	case 5:
+		codebaseName = args[4]
+	default:
+		return fmt.Errorf("Unexpected number of arguments\n"+
+			"Usage: %s %s %s <name> <codebase>",
+			args[0], args[1], args[2])
+	}
+
+	workspaceName := args[3]
+	dir, err := c.workspace.Create(workspaceName, codebaseName)
+	if err != nil {
+		return err
+	}
+	fmt.Println("Created", workspaceName, "at", dir)
+	return nil
+}
+
+// recreateWorkspace handles "hacksaw workspace recreate <name>":
+// args[3] is the existing workspace whose mounts are recreated.
+func (c Command) recreateWorkspace(args []string) error {
+	if len(args) < 4 {
+		return fmt.Errorf("Workspace name is required\n"+
+			"Usage: %s %s %s <name>",
+			args[0], args[1], args[2])
+	}
+
+	workspaceName := args[3]
+	dir, err := c.workspace.Recreate(workspaceName)
+	if err != nil {
+		return err
+	}
+	fmt.Println("Recreated", workspaceName, "at", dir)
+	return nil
+}
+
+// listWorkspaces prints every known workspace with its codebase.
+func (c Command) listWorkspaces() {
+	list := c.workspace.List()
+	fmt.Println("Workspaces:")
+	for name, codebase := range list {
+		fmt.Println("\t", name, codebase)
+	}
+}
+
+// removeWorkspace handles "hacksaw workspace remove <name>":
+// args[3] is the workspace to remove.
+func (c Command) removeWorkspace(args []string) error {
+	if len(args) < 4 {
+		return fmt.Errorf("Workspace name required\n"+
+			"Usage: %s %s %s <name>",
+			args[0], args[1], args[2])
+	}
+	name := args[3]
+	_, err := c.workspace.Remove(name)
+	if err != nil {
+		return err
+	}
+	fmt.Println("Removed workspace", name)
+	return nil
+}
+
+// editProject handles "hacksaw edit <path>": args[2] is a path inside
+// a workspace whose enclosing git project is made editable.
+func (c Command) editProject(args []string) error {
+	if len(args) < 3 {
+		return fmt.Errorf("Edit path required\n"+
+			"Usage: %s %s <path>",
+			args[0], args[1])
+	}
+	path := args[2]
+	branchName, projectPath, err := c.workspace.Edit(path)
+	if err != nil {
+		return err
+	}
+	fmt.Printf("Created branch %s on project %s\n",
+		branchName, projectPath)
+	return nil
+}
+
+//Handle parses all command line arguments
+// Layout: args[0] is the program name, args[1] the command,
+// args[2] an optional subcommand, and the rest command operands.
+func (c Command) Handle(args []string) error {
+	// No command (or "help") just prints usage and succeeds.
+	if len(args) < 2 || args[1] == "help" {
+		fmt.Println(usage)
+		return nil
+	}
+
+	command := args[1]
+
+	switch command {
+	case "codebase", "cb":
+		if len(args) < 3 {
+			return fmt.Errorf("Not enough arguments for codebase command")
+		}
+		subcommand := args[2]
+		switch subcommand {
+		case "add":
+			return c.addCodebase(args)
+		case "remove", "rm":
+			return c.removeCodebase(args)
+		case "default", "def":
+			return c.defaultCodebase(args)
+		case "list", "ls":
+			c.listCodebases()
+			return nil
+		default:
+			return fmt.Errorf("Command \"%s %s\" not found", command, subcommand)
+		}
+	case "workspace", "ws":
+		if len(args) < 3 {
+			return fmt.Errorf("Not enough arguments for workspace command")
+		}
+		subcommand := args[2]
+		switch subcommand {
+		case "new":
+			return c.createWorkspace(args)
+		case "recreate":
+			return c.recreateWorkspace(args)
+		case "remove", "rm":
+			return c.removeWorkspace(args)
+		case "list", "ls":
+			c.listWorkspaces()
+			return nil
+		default:
+			return fmt.Errorf("Command \"%s %s\" not found", command, subcommand)
+		}
+	case "edit":
+		return c.editProject(args)
+	default:
+		return fmt.Errorf("Command \"%s\" not found", command)
+	}
+	// Not reached: every switch branch above returns.
+	return nil
+}
diff --git a/hacksaw/client/command_test.go b/hacksaw/client/command_test.go
new file mode 100644
index 0000000..ccdaf9d
--- /dev/null
+++ b/hacksaw/client/command_test.go
@@ -0,0 +1,395 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package client
+
+import (
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path"
+	"testing"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/codebase"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/config"
+)
+
+// TestHelpCommand checks that "hacksaw help" completes without error.
+func TestHelpCommand(t *testing.T) {
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "help"}
+	err := cmd.Handle(args)
+	if err != nil {
+		t.Error(err)
+	}
+}
+
+// TestNoCommand checks that invoking hacksaw with no arguments prints
+// usage and succeeds rather than erroring out.
+func TestNoCommand(t *testing.T) {
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw"}
+	err := cmd.Handle(args)
+	if err != nil {
+		t.Error(err)
+	}
+}
+
+// TestBadCommand checks that an unknown top-level command is rejected.
+func TestBadCommand(t *testing.T) {
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "doesnotexist"}
+	if err := cmd.Handle(args); err == nil {
+		t.Errorf("Failed to handle bad command")
+	}
+}
+
+// TestEmptyCodebaseCommand checks that "codebase" without a subcommand fails.
+func TestEmptyCodebaseCommand(t *testing.T) {
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "codebase"}
+	if err := cmd.Handle(args); err == nil {
+		t.Errorf("Allowed an empty codebase command")
+	}
+}
+
+// TestAddCodebase checks that a temp directory can be registered as a
+// codebase via "codebase add". The global config is reset afterwards so
+// tests stay independent.
+func TestAddCodebase(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "codebase", "add", "test-codebase", codebaseDir}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
+
+// TestDefaultCodebase checks that after marking a codebase as the default,
+// a workspace can be created without naming a codebase explicitly.
+func TestDefaultCodebase(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	// Minimal repo-style codebase: one git project plus a
+	// .repo/project.list file naming it.
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "codebase", "add", "test-codebase", codebaseDir}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+	args = []string{"hacksaw", "codebase", "default", "test-codebase"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd = NewCommand(bind.NewFakePathBinder(), wsTopDir)
+	// No codebase argument: the default set above must be used.
+	args = []string{"hacksaw", "workspace", "new", "test-workspace"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
+
+// TestListCodebases checks that "codebase list" succeeds even when empty.
+func TestListCodebases(t *testing.T) {
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "codebase", "list"}
+	if err := cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
+
+// TestRemoveCodebase checks that an added codebase can be removed again.
+func TestRemoveCodebase(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "codebase", "add", "test-codebase", codebaseDir}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+	args = []string{"hacksaw", "codebase", "remove", "test-codebase"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
+
+// TestEmptyWorkspaceCommand checks that "workspace" without a subcommand fails.
+func TestEmptyWorkspaceCommand(t *testing.T) {
+	cmd := NewCommand(bind.NewFakePathBinder(), "")
+	args := []string{"hacksaw", "workspace"}
+	if err := cmd.Handle(args); err == nil {
+		t.Errorf("Allowed an empty workspace command")
+	}
+}
+
+// TestCreateWorkspace checks "workspace new <name> <codebase>" against a
+// minimal repo-style codebase registered directly through codebase.Add.
+func TestCreateWorkspace(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	// Minimal repo-style codebase: one git project plus a
+	// .repo/project.list file naming it.
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := NewCommand(bind.NewFakePathBinder(), wsTopDir)
+	args := []string{"hacksaw", "workspace", "new", "test-workspace", "test-codebase"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
+
+// TestRecreateWorkspace checks that an existing workspace can be recreated
+// and that recreating a workspace that was never created fails.
+func TestRecreateWorkspace(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	// Minimal repo-style codebase: one git project plus a
+	// .repo/project.list file naming it.
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := NewCommand(bind.NewFakePathBinder(), wsTopDir)
+	args := []string{"hacksaw", "workspace", "new", "test-workspace", "test-codebase"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+	args = []string{"hacksaw", "workspace", "recreate", "test-workspace"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+	args = []string{"hacksaw", "workspace", "recreate", "does-not-exist"}
+	if err = cmd.Handle(args); err == nil {
+		t.Error("Allowed to recreate an unexistant workspace")
+	}
+}
+
+// TestListWorkspace checks that "workspace list" succeeds after a
+// workspace has been created.
+func TestListWorkspace(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	// Minimal repo-style codebase: one git project plus a
+	// .repo/project.list file naming it.
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := NewCommand(bind.NewFakePathBinder(), wsTopDir)
+
+	args := []string{"hacksaw", "workspace", "new", "test-workspace", "test-codebase"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+
+	args = []string{"hacksaw", "workspace", "list"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
+
+// TestRemoveWorkspace checks that a created workspace can be removed.
+func TestRemoveWorkspace(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	// Minimal repo-style codebase: one git project plus a
+	// .repo/project.list file naming it.
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := NewCommand(bind.NewFakePathBinder(), wsTopDir)
+
+	args := []string{"hacksaw", "workspace", "new", "test-workspace", "test-codebase"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+
+	args = []string{"hacksaw", "workspace", "remove", "test-workspace"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
+
+// TestEditWorkspace checks the end-to-end "edit" flow: it builds a real git
+// project (init + empty commit, so a branch can be created), registers the
+// codebase, creates a workspace from it, and edits a project inside it.
+func TestEditWorkspace(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	projectDir := path.Join(codebaseDir, "project")
+	if err = os.MkdirAll(projectDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	gitCmd := exec.Command("git", "-C", projectDir, "init")
+	output, err := gitCmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			gitCmd.String(), err.Error(), output)
+	}
+	// An initial commit is needed so the repository has a HEAD to branch from.
+	gitCmd = exec.Command("git", "-C", projectDir, "commit", `--message="Initial commit"`, "--allow-empty")
+	output, err = gitCmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			gitCmd.String(), err.Error(), output)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := NewCommand(bind.NewFakePathBinder(), wsTopDir)
+
+	args := []string{"hacksaw", "workspace", "new", "test-workspace", "test-codebase"}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+	wsProjectDir := path.Join(wsTopDir, "test-workspace", "project")
+	args = []string{"hacksaw", "edit", wsProjectDir}
+	if err = cmd.Handle(args); err != nil {
+		t.Error(err)
+	}
+}
diff --git a/hacksaw/cmd/hacksaw/main.go b/hacksaw/cmd/hacksaw/main.go
new file mode 100644
index 0000000..60d1a88
--- /dev/null
+++ b/hacksaw/cmd/hacksaw/main.go
@@ -0,0 +1,160 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// hacksaw let's you create lightweight workspaces of large codebases
+package main
+
+import (
+	"fmt"
+	"net"
+	"net/http"
+	"net/rpc"
+	"os"
+	"os/exec"
+	"os/user"
+	"path/filepath"
+	"strconv"
+	"syscall"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/client"
+)
+
+// hacksawdSocketPath is the system bind daemon's RPC socket;
+// tmpSocketPath is the short-lived socket used when hacksaw is run via
+// sudo (see handleSudoUser).
+const hacksawdSocketPath = "/var/run/hacksaw.sock"
+const tmpSocketPath = "/tmp/hacksaw.sock"
+
+// getPathBinder chooses how bind operations are performed: as root they run
+// in-process; otherwise they are proxied to a bind daemon over an RPC
+// socket, preferring the sudo-spawned /tmp socket when it exists and
+// falling back to the system hacksawd socket.
+func getPathBinder() bind.PathBinder {
+	if os.Geteuid() == 0 {
+		// Called by root without SUDO_USER
+		// Most likely in a mount namespace
+		return bind.NewLocalPathBinder()
+	}
+	_, err := os.Stat(tmpSocketPath)
+	if err == nil {
+		return bind.NewRemoteBindClient(tmpSocketPath)
+	} else {
+		return bind.NewRemoteBindClient(hacksawdSocketPath)
+	}
+}
+
+// getWorkspaceTopDir returns $HOME/hacksaw as the workspace root. When the
+// directory already exists its symlinks are resolved; when it does not
+// exist yet the unresolved path is returned so the caller may create it.
+func getWorkspaceTopDir() (string, error) {
+	home, err := os.UserHomeDir()
+	if err != nil {
+		return "", err
+	}
+	// The hacksaw mount daemon requires all mounts
+	// to be contained in a directory named "hacksaw"
+	topDir := filepath.Join(home, "hacksaw")
+	_, err = os.Stat(topDir)
+	if err == nil {
+		// expected case: directory exists, fall through to resolve symlinks
+	} else if os.IsNotExist(err) {
+		return topDir, nil
+	} else {
+		return "", err
+	}
+	topDir, err = filepath.EvalSymlinks(topDir)
+	if err != nil {
+		return "", err
+	}
+	return topDir, nil
+}
+
+// dropPrivileges re-runs the current hacksaw invocation as sudoUser: it
+// chowns the RPC socket to that user so the unprivileged child can reach
+// it, then spawns the same command line with the user's uid/gid and HOME
+// and waits for the child to finish.
+func dropPrivileges(sudoUser string, socketPath string) error {
+	usr, err := user.Lookup(sudoUser)
+	if err != nil {
+		return err
+	}
+	sudoUid, err := strconv.ParseUint(usr.Uid, 10, 32)
+	if err != nil {
+		return err
+	}
+	sudoGid, err := strconv.ParseUint(usr.Gid, 10, 32)
+	if err != nil {
+		return err
+	}
+	// Hand the socket over to the target user before dropping privileges.
+	if err = os.Chown(socketPath, int(sudoUid), int(sudoGid)); err != nil {
+		return err
+	}
+	cmd := exec.Command(os.Args[0], os.Args[1:]...)
+	cmd.Stdout = os.Stdout
+	cmd.Stderr = os.Stderr
+	cmd.Env = append(os.Environ(), "HOME="+usr.HomeDir)
+	cmd.SysProcAttr = &syscall.SysProcAttr{
+		Credential: &syscall.Credential{
+			Uid: uint32(sudoUid),
+			Gid: uint32(sudoGid),
+		},
+		// Start the child in a new session.
+		Setsid: true,
+	}
+	if err := cmd.Start(); err != nil {
+		return err
+	}
+	_, err = cmd.Process.Wait()
+	if err != nil {
+		return err
+	}
+	if err = cmd.Process.Release(); err != nil {
+		return err
+	}
+	return nil
+}
+
+// createPathBinderListener opens a unix-domain listener at socketPath and
+// registers a local path-binder RPC server on the default HTTP mux; the
+// caller is responsible for serving on the returned listener.
+func createPathBinderListener(socketPath string) (net.Listener, error) {
+	listener, err := net.Listen("unix", socketPath)
+	if err != nil {
+		return nil, err
+	}
+
+	binder := bind.NewLocalPathBinder()
+	server := bind.NewServer(binder)
+	if err = rpc.Register(server); err != nil {
+		return nil, err
+	}
+	rpc.HandleHTTP()
+	return listener, nil
+}
+
+// handleSudoUser serves bind RPCs on a temporary socket in the background
+// while the command is re-executed with the invoking (pre-sudo) user's
+// privileges; the socket is removed when the child completes.
+func handleSudoUser(sudoUser string) error {
+	if err := os.RemoveAll(tmpSocketPath); err != nil {
+		return err
+	}
+	listener, err := createPathBinderListener(tmpSocketPath)
+	if err != nil {
+		return err
+	}
+	defer os.RemoveAll(tmpSocketPath)
+	go http.Serve(listener, nil)
+	return dropPrivileges(sudoUser, tmpSocketPath)
+}
+
+// run is the top-level entry point: when invoked as root via sudo it hands
+// off to handleSudoUser (which re-executes as the real user); otherwise it
+// resolves the workspace top directory and forwards the command line to
+// the client package.
+func run(args []string) error {
+	sudoUser := os.Getenv("SUDO_USER")
+	if os.Geteuid() == 0 && sudoUser != "" {
+		return handleSudoUser(sudoUser)
+	}
+	workspaceTopDir, err := getWorkspaceTopDir()
+	if err != nil {
+		return err
+	}
+	pathBinder := getPathBinder()
+	return client.HandleCommand(workspaceTopDir, pathBinder, args)
+}
+
+// main runs the CLI and exits with status 1 on any error.
+func main() {
+	if err := run(os.Args); err != nil {
+		fmt.Println("Error:", err)
+		os.Exit(1)
+	}
+}
diff --git a/hacksaw/cmd/hacksaw/main_test.go b/hacksaw/cmd/hacksaw/main_test.go
new file mode 100644
index 0000000..09669ab
--- /dev/null
+++ b/hacksaw/cmd/hacksaw/main_test.go
@@ -0,0 +1,39 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"io/ioutil"
+	"os"
+	"testing"
+)
+
+// TestHelp runs the whole binary entry path with "help", pointing HOME at
+// a temp directory so getWorkspaceTopDir does not touch the real home.
+func TestHelp(t *testing.T) {
+	homeDir, err := ioutil.TempDir("", "hacksaw_test_home")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(homeDir)
+	if err = os.Setenv("HOME", homeDir); err != nil {
+		t.Error(err)
+	}
+	args := []string{
+		"hacksaw",
+		"help",
+	}
+	if err = run(args); err != nil {
+		t.Error(err)
+	}
+}
diff --git a/hacksaw/cmd/hacksawd/main.go b/hacksaw/cmd/hacksawd/main.go
new file mode 100644
index 0000000..8f1ad1a
--- /dev/null
+++ b/hacksaw/cmd/hacksawd/main.go
@@ -0,0 +1,52 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// hacksawd is a privileged daemon that manages the mounts
+package main
+
+import (
+	"net"
+	"net/http"
+	"net/rpc"
+	"os"
+	"strconv"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+)
+
+// main expects exactly one pre-opened listening socket handed over by its
+// supervisor (systemd-style socket activation: LISTEN_PID must match this
+// process, LISTEN_FDS must be 1, first inherited fd at 3 — TODO confirm
+// against the unit file) and serves path-binder RPCs on it over HTTP.
+func main() {
+	if os.Getenv("LISTEN_PID") != strconv.Itoa(os.Getpid()) {
+		panic("Unexpected PID")
+	}
+
+	if os.Getenv("LISTEN_FDS") != strconv.Itoa(1) {
+		panic("Unexpected number of socket fds")
+	}
+
+	// The first activated socket is always passed as fd 3.
+	const socketFD = 3
+	socketFile := os.NewFile(socketFD, "hacksawd.sock")
+
+	listener, err := net.FileListener(socketFile)
+	if err != nil {
+		panic(err)
+	}
+
+	binder := bind.NewLocalPathBinder()
+	server := bind.NewServer(binder)
+	if err = rpc.Register(server); err != nil {
+		panic(err)
+	}
+	rpc.HandleHTTP()
+	http.Serve(listener, nil)
+}
diff --git a/hacksaw/codebase/codebase.go b/hacksaw/codebase/codebase.go
new file mode 100644
index 0000000..9c38b91
--- /dev/null
+++ b/hacksaw/codebase/codebase.go
@@ -0,0 +1,92 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package codebase let's you manage codebases
+package codebase
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/config"
+)
+
+// Add registers a new codebase under name, mapping it to path (made
+// absolute). The path must exist on disk and the name must not already be
+// taken. Returns the updated global config.
+func Add(name string, path string) (*config.Config, error) {
+	absPath, err := filepath.Abs(path)
+	if err != nil {
+		return nil, err
+	}
+	//check that the codebase path is valid before adding
+	_, err = os.Stat(absPath)
+	if err != nil {
+		return nil, err
+	}
+	cfg := config.GetConfig()
+	if _, ok := cfg.Codebases[name]; ok {
+		return cfg, fmt.Errorf("Codebase %s already exists", name)
+	}
+	cfg.Codebases[name] = absPath
+	return cfg, err
+}
+
+// Remove deletes an existing codebase from the config. If it was the
+// default codebase, the default is cleared. Returns the updated config.
+func Remove(remove string) (*config.Config, error) {
+	cfg := config.GetConfig()
+	_, ok := cfg.Codebases[remove]
+	if !ok {
+		return nil, fmt.Errorf("Codebase %s not found", remove)
+	}
+	delete(cfg.Codebases, remove)
+	if cfg.DefaultCodebase == remove {
+		cfg.DefaultCodebase = ""
+	}
+	return cfg, nil
+}
+
+// Default returns the name of the default codebase ("" if none is set).
+func Default() string {
+	cfg := config.GetConfig()
+	def := cfg.DefaultCodebase
+	return def
+}
+
+// SetDefault marks an already-registered codebase as the default; it is an
+// error if no codebase with that name exists.
+func SetDefault(def string) error {
+	cfg := config.GetConfig()
+	_, ok := cfg.Codebases[def]
+	if !ok {
+		return fmt.Errorf("Codebase %s not found", def)
+	}
+	cfg.DefaultCodebase = def
+	return nil
+}
+
+// List returns the name->path map of all registered codebases (the live
+// map from the config, not a copy).
+func List() map[string]string {
+	cfg := config.GetConfig()
+	return cfg.Codebases
+}
+
+// GetDir retrieves the directory of a specific codebase
+func GetDir(codebase string) (string, error) {
+	cfg := config.GetConfig()
+	dir, ok := cfg.Codebases[codebase]
+	if !ok {
+		return dir, fmt.Errorf("Codebase %s not found",
+			codebase)
+	}
+	return dir, nil
+}
diff --git a/hacksaw/codebase/codebase_test.go b/hacksaw/codebase/codebase_test.go
new file mode 100644
index 0000000..16442b0
--- /dev/null
+++ b/hacksaw/codebase/codebase_test.go
@@ -0,0 +1,217 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package codebase
+
+import (
+	"io/ioutil"
+	"os"
+	"path"
+	"reflect"
+	"testing"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/config"
+)
+
+// TestAdd checks that adding two codebases produces exactly the expected
+// config after each addition.
+func TestAdd(t *testing.T) {
+	defer config.GetConfig().Reset()
+	firstCodebaseDir, err := ioutil.TempDir("", "first")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(firstCodebaseDir)
+	outputConfig, err := Add("first-codebase", firstCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	expectedConfig := config.Config{
+		DefaultCodebase: "",
+		Codebases: map[string]string{
+			"first-codebase": firstCodebaseDir,
+		},
+		Workspaces: map[string]string{}}
+	if !reflect.DeepEqual(expectedConfig, *outputConfig) {
+		t.Errorf("Expected config %v different than output config %v",
+			expectedConfig, *outputConfig)
+	}
+
+	secondCodebaseDir, err := ioutil.TempDir("", "second")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(secondCodebaseDir)
+	if outputConfig, err = Add("second-codebase", secondCodebaseDir); err != nil {
+		t.Error(err)
+	}
+	expectedConfig = config.Config{
+		DefaultCodebase: "",
+		Codebases: map[string]string{
+			"first-codebase":  firstCodebaseDir,
+			"second-codebase": secondCodebaseDir,
+		},
+		Workspaces: map[string]string{}}
+	if !reflect.DeepEqual(expectedConfig, *outputConfig) {
+		t.Errorf("Expected config %v different than output config %v",
+			expectedConfig, *outputConfig)
+	}
+}
+
+// TestAddDuplicate checks that adding the same codebase name twice fails.
+func TestAddDuplicate(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "first")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	_, err = Add("codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	_, err = Add("codebase", codebaseDir)
+	if err == nil {
+		t.Error("Allowed duplicate codebase")
+	}
+}
+
+// TestAddInvalidCodebase checks that a nonexistent directory is rejected.
+func TestAddInvalidCodebase(t *testing.T) {
+	defer config.GetConfig().Reset()
+	tempDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(tempDir)
+	testCodebaseDir := path.Join(tempDir, "doesnotexist")
+	_, err = Add("test-codebase", testCodebaseDir)
+	if err == nil {
+		t.Error("Adding a codebase with an invalid directory should not be allowed")
+	}
+}
+
+// TestList checks that List returns exactly the codebases added.
+func TestList(t *testing.T) {
+	defer config.GetConfig().Reset()
+	firstCodebaseDir, err := ioutil.TempDir("", "first")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(firstCodebaseDir)
+	_, err = Add("first-codebase", firstCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	secondCodebaseDir, err := ioutil.TempDir("", "second")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(secondCodebaseDir)
+	if _, err = Add("second-codebase", secondCodebaseDir); err != nil {
+		t.Error(err)
+	}
+	list := List()
+	expectedList := map[string]string{
+		"first-codebase":  firstCodebaseDir,
+		"second-codebase": secondCodebaseDir,
+	}
+	if !reflect.DeepEqual(expectedList, list) {
+		t.Errorf("Codebase list %v is different than expected list %v",
+			list, expectedList)
+	}
+}
+
+// TestSetGetDefault checks that SetDefault is reflected by Default.
+func TestSetGetDefault(t *testing.T) {
+	defer config.GetConfig().Reset()
+	firstCodebaseDir, err := ioutil.TempDir("", "first")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(firstCodebaseDir)
+	_, err = Add("first-codebase", firstCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	secondCodebaseDir, err := ioutil.TempDir("", "second")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(secondCodebaseDir)
+	_, err = Add("second-codebase", secondCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	if err = SetDefault("second-codebase"); err != nil {
+		t.Error(err)
+	}
+	def := Default()
+	if def != "second-codebase" {
+		t.Error("Setting default codebase failed")
+	}
+}
+
+// TestBadDefault checks that an unregistered name cannot be made default.
+func TestBadDefault(t *testing.T) {
+	defer config.GetConfig().Reset()
+	testCodebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(testCodebaseDir)
+	_, err = Add("test-codebase", testCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	if err = SetDefault("does-not-exist"); err == nil {
+		t.Error("Allowed invalid default codebase")
+	}
+}
+
+// TestRemove checks that removing a codebase drops it from the config and
+// clears the default when the removed codebase was the default.
+func TestRemove(t *testing.T) {
+	defer config.GetConfig().Reset()
+	testCodebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(testCodebaseDir)
+	_, err = Add("remove-me", testCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	SetDefault("remove-me")
+	cfg, err := Remove("remove-me")
+	if err != nil {
+		t.Error(err)
+	}
+	_, ok := cfg.Codebases["remove-me"]
+	if ok {
+		t.Error("Removed codebase is still in the configuration")
+	}
+	if cfg.DefaultCodebase != "" {
+		t.Error("Removing the default codebase did not reset the default")
+	}
+}
+
+// TestBadRemove checks that removing an unregistered codebase fails.
+func TestBadRemove(t *testing.T) {
+	defer config.GetConfig().Reset()
+	testCodebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(testCodebaseDir)
+	_, err = Add("remove-me", testCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	_, err = Remove("does-not-exist")
+	if err == nil {
+		t.Error("Attempt to remove an unexistant codebase",
+			"should have failed")
+	}
+}
diff --git a/hacksaw/config/config.go b/hacksaw/config/config.go
new file mode 100644
index 0000000..83a0ffd
--- /dev/null
+++ b/hacksaw/config/config.go
@@ -0,0 +1,109 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package config reads and writes hacksaw configuration data to io
+package config
+
+import (
+	"encoding/json"
+	"io"
+	"io/ioutil"
+	"os"
+	"sync"
+)
+
+// Config is hacksaw's persistent state: the default codebase name plus the
+// codebase and workspace registries. It is serialized as JSON.
+type Config struct {
+	DefaultCodebase string
+	Codebases       map[string]string //key: name, value: path
+	Workspaces      map[string]string //key: name, value: codebase
+}
+
+// Read replaces c with the JSON configuration decoded from input.
+func (c *Config) Read(input io.Reader) error {
+	cfgBytes, err := ioutil.ReadAll(input)
+	if err != nil {
+		return err
+	}
+	return json.Unmarshal(cfgBytes, &c)
+}
+
+// Write serializes the configuration to output as indented JSON.
+func (c Config) Write(output io.Writer) error {
+	cfgBytes, err := json.MarshalIndent(c, "", " ")
+	if err != nil {
+		return err
+	}
+	_, err = output.Write(cfgBytes)
+	return err
+}
+
+// ReadConfigFromFile loads the configuration from the JSON file at
+// filePath; it fails if the file does not exist or cannot be parsed.
+func (c *Config) ReadConfigFromFile(filePath string) error {
+	_, err := os.Stat(filePath)
+	if err != nil {
+		return err
+	}
+	cfgFile, err := os.Open(filePath)
+	if err != nil {
+		return err
+	}
+	defer cfgFile.Close()
+	err = c.Read(cfgFile)
+	return err
+}
+
+// WriteConfigToFile creates (or truncates) filePath and writes the
+// configuration to it as JSON.
+func (c Config) WriteConfigToFile(filePath string) error {
+	cfgFile, err := os.Create(filePath)
+	if err != nil {
+		return err
+	}
+	defer cfgFile.Close()
+	return c.Write(cfgFile)
+}
+
+// Copy returns a deep copy of the config: both maps are duplicated so that
+// mutating the copy does not affect the original.
+func (c Config) Copy() Config {
+	cfgCopy := Config{
+		DefaultCodebase: c.DefaultCodebase,
+		Codebases:       map[string]string{},
+		Workspaces:      map[string]string{}}
+	for name, path := range c.Codebases {
+		cfgCopy.Codebases[name] = path
+	}
+	for name, codebase := range c.Workspaces {
+		cfgCopy.Workspaces[name] = codebase
+	}
+	return cfgCopy
+}
+
+// Reset sets the config back to its zero state (no default codebase,
+// empty codebase and workspace maps). Used by tests for isolation.
+func (c *Config) Reset() {
+	*c = Config{
+		DefaultCodebase: "",
+		Codebases:       map[string]string{},
+		Workspaces:      map[string]string{}}
+}
+
+// singleton holds the process-wide config; once guards its lazy creation.
+var singleton *Config
+var once sync.Once
+
+// GetConfig returns the lazily-initialized process-wide config singleton.
+func GetConfig() *Config {
+	once.Do(func() {
+		singleton = &Config{
+			DefaultCodebase: "",
+			Codebases:       map[string]string{},
+			Workspaces:      map[string]string{}}
+	})
+	return singleton
+}
diff --git a/hacksaw/config/config_test.go b/hacksaw/config/config_test.go
new file mode 100644
index 0000000..e010022
--- /dev/null
+++ b/hacksaw/config/config_test.go
@@ -0,0 +1,74 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package config
+
+import (
+	"bytes"
+	"reflect"
+	"testing"
+)
+
+// testWriteExpectedOutput is the exact indented-JSON form Config.Write is
+// expected to produce for the fixture below.
+const testWriteExpectedOutput = `{
+ "DefaultCodebase": "test-codebase",
+ "Codebases": {
+  "test-codebase": "/path/to/test/codebase"
+ },
+ "Workspaces": {}
+}`
+
+// TestWrite checks that Config.Write produces byte-exact JSON output.
+func TestWrite(t *testing.T) {
+	inputConfig := Config{
+		DefaultCodebase: "test-codebase",
+		Codebases: map[string]string{
+			"test-codebase": "/path/to/test/codebase",
+		},
+		Workspaces: map[string]string{}}
+	var outputBuffer bytes.Buffer
+	if err := inputConfig.Write(&outputBuffer); err != nil {
+		t.Error(err)
+	}
+	expectedOutput := []byte(testWriteExpectedOutput)
+	if bytes.Compare(outputBuffer.Bytes(), expectedOutput) != 0 {
+		t.Errorf("Output %s is different that expected output %s",
+			string(outputBuffer.Bytes()), string(expectedOutput))
+	}
+}
+
+// testReadInput is a JSON config document fed to Config.Read below.
+const testReadInput = `{
+ "DefaultCodebase": "test-codebase",
+ "Codebases": {
+  "test-codebase": "/path/to/test/codebase"
+ },
+ "Workspaces": {}
+}`
+
+// TestRead checks that Config.Read decodes JSON into the expected struct.
+func TestRead(t *testing.T) {
+	inputBytes := []byte(testReadInput)
+	inputBuffer := bytes.NewBuffer(inputBytes)
+	var outputConfig Config
+	if err := outputConfig.Read(inputBuffer); err != nil {
+		t.Error(err)
+	}
+	expectedOutput := Config{
+		DefaultCodebase: "test-codebase",
+		Codebases: map[string]string{
+			"test-codebase": "/path/to/test/codebase",
+		},
+		Workspaces: map[string]string{}}
+	if !reflect.DeepEqual(outputConfig, expectedOutput) {
+		t.Errorf("Written config %v is different than read config %v",
+			outputConfig, expectedOutput)
+	}
+}
diff --git a/hacksaw/git/lister.go b/hacksaw/git/lister.go
new file mode 100644
index 0000000..44cb3e7
--- /dev/null
+++ b/hacksaw/git/lister.go
@@ -0,0 +1,19 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package git
+
+// ProjectLister enumerates the git projects that make up a codebase.
+type ProjectLister interface {
+	// List returns the paths, relative to codebasePath, of all git
+	// projects in the codebase.
+	List(codebasePath string) ([]string, error)
+}
diff --git a/hacksaw/git/repo.go b/hacksaw/git/repo.go
new file mode 100644
index 0000000..b7887ea
--- /dev/null
+++ b/hacksaw/git/repo.go
@@ -0,0 +1,47 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package git
+
+import (
+	"bufio"
+	"os"
+	"path"
+)
+
+// repoLister lists the git projects of a repo-managed codebase by
+// reading the .repo/project.list manifest.
+type repoLister struct {
+}
+
+// NewRepoLister returns a ProjectLister backed by the repo tool's
+// .repo/project.list file.
+func NewRepoLister() ProjectLister {
+	var rl repoLister
+	return &rl
+}
+
+// List returns the relative paths of all git projects in the codebase
+// rooted at codebasePath. The ".repo" directory itself is always the
+// first entry; empty lines in the manifest are skipped.
+func (rl repoLister) List(codebasePath string) ([]string, error) {
+	projectList := []string{".repo"}
+	listPath := path.Join(codebasePath, ".repo/project.list")
+	listFile, err := os.Open(listPath)
+	if err != nil {
+		return projectList, err
+	}
+	defer listFile.Close()
+	scanner := bufio.NewScanner(listFile)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if line != "" {
+			projectList = append(projectList, line)
+		}
+	}
+	// Surface any read error hit by the scanner; the original code
+	// returned the stale nil err and silently dropped such failures.
+	return projectList, scanner.Err()
+}
diff --git a/hacksaw/git/repo_test.go b/hacksaw/git/repo_test.go
new file mode 100644
index 0000000..394bf44
--- /dev/null
+++ b/hacksaw/git/repo_test.go
@@ -0,0 +1,55 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package git
+
+import (
+	"io/ioutil"
+	"os"
+	"path"
+	"reflect"
+	"testing"
+)
+
+// TestList verifies that the repo lister returns ".repo" followed by
+// every line of .repo/project.list.
+func TestList(t *testing.T) {
+	testCodebaseDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	if err = os.Mkdir(path.Join(testCodebaseDir, ".repo"), os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	// Note: no trailing newline after project3, so the scanner's
+	// final-line handling is exercised too.
+	listContents := []byte(
+		"path/to/project1\n" +
+			"path/to/project2\n" +
+			"path/to/project3")
+	listFile := path.Join(testCodebaseDir, ".repo/project.list")
+	if err = ioutil.WriteFile(listFile, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	lister := NewRepoLister()
+	projectList, err := lister.List(testCodebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	expectedList := []string{
+		".repo",
+		"path/to/project1",
+		"path/to/project2",
+		"path/to/project3",
+	}
+	if !reflect.DeepEqual(projectList, expectedList) {
+		t.Errorf("Got list %v but expected %v", projectList, expectedList)
+	}
+}
diff --git a/hacksaw/go.mod b/hacksaw/go.mod
new file mode 100644
index 0000000..65a6d50
--- /dev/null
+++ b/hacksaw/go.mod
@@ -0,0 +1,3 @@
+module android.googlesource.com/platform/tools/treble.git/hacksaw
+
+go 1.15
diff --git a/hacksaw/images/workspace-diagram.png b/hacksaw/images/workspace-diagram.png
new file mode 100644
index 0000000..cea44be
--- /dev/null
+++ b/hacksaw/images/workspace-diagram.png
Binary files differ
diff --git a/hacksaw/mount/fake.go b/hacksaw/mount/fake.go
new file mode 100644
index 0000000..a2d059c
--- /dev/null
+++ b/hacksaw/mount/fake.go
@@ -0,0 +1,66 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package mount
+
+import (
+	"fmt"
+)
+
+// MountEntry records the parameters of a single fake mount call.
+type MountEntry struct {
+	Source string
+	FSType string
+	Flags  uintptr
+	Data   string
+}
+
+// fakeMounter is an in-memory Mounter implementation for tests.
+type fakeMounter struct {
+	mountMap map[string]MountEntry // keyed by target
+}
+
+// NewFakeMounter returns an empty in-memory mounter.
+func NewFakeMounter() *fakeMounter {
+	var f fakeMounter
+	f.mountMap = make(map[string]MountEntry)
+	return &f
+}
+
+// Mount records a mount of source on target.
+// Using the target as the map key prevents more
+// than one source mapping to the same target.
+func (f *fakeMounter) Mount(source string, target string, fstype string, flags uintptr, data string) error {
+	f.mountMap[target] = MountEntry{
+		Source: source,
+		FSType: fstype,
+		Flags:  flags,
+		Data:   data,
+	}
+	return nil
+}
+
+// Unmount removes the recorded mount at target, failing if none exists.
+func (f *fakeMounter) Unmount(target string, flags int) error {
+	if _, ok := f.mountMap[target]; !ok {
+		return fmt.Errorf("Mount %s not found", target)
+	}
+	delete(f.mountMap, target)
+	return nil
+}
+
+// List returns the target paths of all recorded mounts, in map
+// iteration (i.e. unspecified) order.
+func (f *fakeMounter) List() ([]string, error) {
+	var list []string
+	// "for target := range" drops the redundant blank identifier that
+	// gofmt/go vet flag in "for target, _ := range".
+	for target := range f.mountMap {
+		list = append(list, target)
+	}
+	return list, nil
+}
diff --git a/hacksaw/mount/mount.go b/hacksaw/mount/mount.go
new file mode 100644
index 0000000..f301eb0
--- /dev/null
+++ b/hacksaw/mount/mount.go
@@ -0,0 +1,21 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package mount
+
+// Mounter abstracts mount operations so they can be faked in tests.
+type Mounter interface {
+	// Mount mounts source on target with the given filesystem type,
+	// flags and data options.
+	Mount(string, string, string, uintptr, string) error
+	// Unmount detaches the mount at the given target with the given
+	// flags.
+	Unmount(string, int) error
+	// List returns the target paths of all active mounts.
+	List() ([]string, error)
+}
diff --git a/hacksaw/mount/system.go b/hacksaw/mount/system.go
new file mode 100644
index 0000000..1d8bfbd
--- /dev/null
+++ b/hacksaw/mount/system.go
@@ -0,0 +1,80 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package mount
+
+import (
+	"bufio"
+	"io"
+	"os"
+	"strings"
+	"syscall"
+)
+
+// systemMounter is the real Mounter implementation backed by the
+// kernel's mount/umount syscalls and /proc/mounts.
+type systemMounter struct {
+}
+
+// NewSystemMounter returns a Mounter that performs real mounts.
+func NewSystemMounter() *systemMounter {
+	var f systemMounter
+	return &f
+}
+
+// Mount delegates directly to the mount(2) syscall.
+func (f *systemMounter) Mount(source string, target string, fstype string, flags uintptr, data string) error {
+	return syscall.Mount(source, target, fstype, flags, data)
+}
+
+// Unmount delegates directly to the umount2(2) syscall.
+func (f *systemMounter) Unmount(target string, flags int) error {
+	return syscall.Unmount(target, flags)
+}
+
+// List returns the mount-point paths of all mounts visible in
+// /proc/mounts.
+func (f *systemMounter) List() ([]string, error) {
+	mountsFile, err := os.Open("/proc/mounts")
+	if err != nil {
+		return nil, err
+	}
+	defer mountsFile.Close()
+	mounts, err := f.parseMounts(mountsFile)
+	if err != nil {
+		return nil, err
+	}
+	var mountList []string
+	for _, mount := range mounts {
+		mountList = append(mountList, mount.Path)
+	}
+	return mountList, err
+}
+
+// Mount describes one parsed line of /proc/mounts.
+type Mount struct {
+	Device string
+	Path   string
+	Type   string
+	Opts   string
+}
+
+// parseMounts parses /proc/mounts-formatted text from mountSource.
+func (f *systemMounter) parseMounts(mountSource io.Reader) ([]Mount, error) {
+	var mounts []Mount
+	scanner := bufio.NewScanner(mountSource)
+	for scanner.Scan() {
+		line := scanner.Text()
+		fields := strings.Fields(line)
+		// Well-formed lines have at least device, path, type and
+		// opts; skip malformed/short lines instead of panicking on
+		// an out-of-range index.
+		if len(fields) < 4 {
+			continue
+		}
+		mount := Mount{
+			Device: fields[0],
+			Path:   fields[1],
+			Type:   fields[2],
+			Opts:   fields[3],
+		}
+		mounts = append(mounts, mount)
+	}
+	return mounts, scanner.Err()
+}
diff --git a/hacksaw/scripts/hacksaw.service b/hacksaw/scripts/hacksaw.service
new file mode 100644
index 0000000..513ef30
--- /dev/null
+++ b/hacksaw/scripts/hacksaw.service
@@ -0,0 +1,11 @@
+[Unit]
+Description=Hacksaw service
+ConditionPathExists=/usr/local/bin/hacksawd
+
+[Service]
+# Runs as root by default
+Type=simple
+ExecStart=/usr/local/bin/hacksawd
+
+[Install]
+RequiredBy=hacksaw.socket
diff --git a/hacksaw/scripts/hacksaw.socket b/hacksaw/scripts/hacksaw.socket
new file mode 100644
index 0000000..46bec93
--- /dev/null
+++ b/hacksaw/scripts/hacksaw.socket
@@ -0,0 +1,8 @@
+[Socket]
+ListenStream=/var/run/hacksaw.sock
+SocketMode=0660
+SocketUser=root
+SocketGroup=hacksaw
+
+[Install]
+WantedBy=sockets.target
diff --git a/hacksaw/scripts/install-service.sh b/hacksaw/scripts/install-service.sh
new file mode 100755
index 0000000..7229288
--- /dev/null
+++ b/hacksaw/scripts/install-service.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Installs hacksawd as a socket-activated systemd service.
+# This script must be run with sudo.
+set -e
+set -v
+
+# Quote expansions so paths containing spaces do not word-split.
+SCRIPT_DIR=$(dirname "$(readlink --canonicalize "$0")")
+
+# The hacksaw group is used to manage access to the
+# hacksawd service
+groupadd hacksaw
+usermod -a -G hacksaw "${SUDO_USER}"
+cp "${SCRIPT_DIR}/hacksaw.service" /lib/systemd/system/hacksaw.service
+cp "${SCRIPT_DIR}/hacksaw.socket" /lib/systemd/system/hacksaw.socket
+# Unit files only need to be readable; they are not executable.
+chmod 644 /lib/systemd/system/hacksaw.*
+systemctl enable hacksaw.service
+systemctl enable hacksaw.socket
+# hacksaw.socket starts hacksaw.service on-demand
+# so hacksaw.service does not need to be explicitly
+# started
+systemctl start hacksaw.socket
+echo "The installation script creates a new 'hacksaw' group and adds you to it."
+echo "You will need to log out and log back in for the group changes to take effect."
diff --git a/hacksaw/scripts/uninstall-service.sh b/hacksaw/scripts/uninstall-service.sh
new file mode 100755
index 0000000..e617708
--- /dev/null
+++ b/hacksaw/scripts/uninstall-service.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+# Uninstalls the hacksawd systemd service, socket and group.
+# This script must be run with sudo.
+set -e
+set -v
+
+systemctl stop hacksaw.socket
+systemctl stop hacksaw.service
+systemctl disable hacksaw.service
+systemctl disable hacksaw.socket
+rm /lib/systemd/system/hacksaw.service
+rm /lib/systemd/system/hacksaw.socket
+# Quote ${SUDO_USER} so an unset or unusual value cannot word-split.
+deluser "${SUDO_USER}" hacksaw
+groupdel hacksaw
diff --git a/hacksaw/workspace/compose.go b/hacksaw/workspace/compose.go
new file mode 100644
index 0000000..f1cc2c8
--- /dev/null
+++ b/hacksaw/workspace/compose.go
@@ -0,0 +1,151 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workspace
+
+import (
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/git"
+)
+
+// Composer builds and tears down workspaces by bind-mounting the git
+// projects of a codebase into a workspace directory.
+type Composer struct {
+	pathBinder bind.PathBinder
+}
+
+// NewComposer returns a Composer that uses the given path binder.
+func NewComposer(bm bind.PathBinder) Composer {
+	return Composer{bm}
+}
+
+// isDirEmpty reports whether the directory name has no entries.
+func isDirEmpty(name string) (bool, error) {
+	dir, err := os.Open(name)
+	if err != nil {
+		return false, err
+	}
+	defer dir.Close()
+	// Reading a single name is enough: io.EOF means empty.
+	_, err = dir.Readdirnames(1)
+	if err == io.EOF {
+		return true, nil
+	}
+	return false, err
+}
+
+// Compose a workspace from a codebase.
+// Returns a list of path binds in the order they
+// were bound.
+func (m Composer) Compose(codebasePath string, workspacePath string) ([]string, error) {
+	lister := git.NewRepoLister()
+	gitProjects, err := lister.List(codebasePath)
+	if err != nil {
+		return nil, err
+	}
+	fmt.Print("Composing")
+	var bindList []string
+	// Sorting the list of projects in alphabetical
+	// order ensures that parent projects are bound
+	// before their nested child projects, which is important
+	// to avoid bind conflicts
+	sort.Strings(gitProjects)
+	for _, project := range gitProjects {
+		fmt.Print(".") // Display some progress
+		// skip empty project names
+		if project == "" {
+			continue
+		}
+		source := filepath.Join(codebasePath, project)
+		destination := filepath.Join(workspacePath, project)
+		if err = os.MkdirAll(destination, os.ModePerm); err != nil {
+			fmt.Print("\n")
+			return bindList, err
+		}
+		isEmpty, err := isDirEmpty(destination)
+		if err != nil {
+			return bindList, err
+		}
+		if !isEmpty {
+			// If the destination dir already existed and
+			// was not empty then assume we are recreating
+			// a workspace and the current path already
+			// existed in the workspace
+			continue
+		}
+		if err = m.pathBinder.BindReadOnly(source, destination); err != nil {
+			fmt.Print("\n")
+			return bindList, err
+		}
+		bindList = append(bindList, destination)
+	}
+	fmt.Print("\n")
+	fmt.Println("Workspace composed")
+	// Files outside any git project are copied so the workspace
+	// mirrors the full codebase layout.
+	copier := NewFileCopier()
+	return bindList, copier.Copy(codebasePath, gitProjects, workspacePath)
+}
+
+// Dismantle a workspace.
+// Returns a list of path unbinds in the order they
+// were unbound.
+func (m Composer) Dismantle(dismantlePath string) ([]string, error) {
+	bindList, err := m.List(dismantlePath)
+	if err != nil {
+		return nil, err
+	}
+	// Sorting the list of binds in reverse alphabetical
+	// order ensures that nested child projects are unbound
+	// before their parent projects, which is important
+	// to avoid unbind conflicts
+	sort.Sort(sort.Reverse(sort.StringSlice(bindList)))
+	fmt.Print("Dismantling")
+	var unbindList []string
+	for _, bindPath := range bindList {
+		fmt.Print(".") // Display some progress
+		if err = m.pathBinder.Unbind(bindPath); err != nil {
+			fmt.Print("\n")
+			return unbindList, err
+		}
+		unbindList = append(unbindList, bindPath)
+	}
+	fmt.Print("\n")
+	fmt.Println("Workspace dismantled")
+	return unbindList, err
+}
+
+// Unbind a single bound project at unbindPath.
+func (m Composer) Unbind(unbindPath string) error {
+	return m.pathBinder.Unbind(unbindPath)
+}
+
+// List all binds attached under a directory.
+func (m Composer) List(listPath string) ([]string, error) {
+	listPath, err := filepath.EvalSymlinks(listPath)
+	if err != nil {
+		return nil, err
+	}
+	fullBindList, err := m.pathBinder.List()
+	if err != nil {
+		return nil, err
+	}
+	var matchBindList []string
+	for _, bindPath := range fullBindList {
+		// The appended "/" prevents a sibling with the same string
+		// prefix (e.g. "/ws/foo2" vs "/ws/foo") from matching.
+		if strings.HasPrefix(bindPath+"/", listPath+"/") {
+			matchBindList = append(matchBindList, bindPath)
+		}
+	}
+	return matchBindList, err
+}
diff --git a/hacksaw/workspace/compose_test.go b/hacksaw/workspace/compose_test.go
new file mode 100644
index 0000000..087499c
--- /dev/null
+++ b/hacksaw/workspace/compose_test.go
@@ -0,0 +1,158 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workspace
+
+import (
+	"io/ioutil"
+	"os"
+	"path"
+	"reflect"
+	"testing"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+)
+
+// TestBasicCompose composes a one-project codebase into a workspace
+// using a fake path binder and checks the bind and unbind lists.
+func TestBasicCompose(t *testing.T) {
+	var err error
+	var codebaseDir string
+	if codebaseDir, err = ioutil.TempDir("", "codebase"); err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	var wsTempDir string
+	if wsTempDir, err = ioutil.TempDir("", "workspace"); err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	fakePathBinder := bind.NewFakePathBinder()
+	wsComposer := NewComposer(fakePathBinder)
+	list, err := wsComposer.Compose(codebaseDir, wsTopDir)
+	if err != nil {
+		t.Error(err)
+	}
+	expectedList := []string{
+		path.Join(wsTopDir, ".repo"),
+		path.Join(wsTopDir, "project"),
+	}
+	if !reflect.DeepEqual(expectedList, list) {
+		t.Errorf("Expected a list of binds %v and got %v", expectedList, list)
+	}
+	list, err = wsComposer.Dismantle(wsTopDir)
+	if err != nil {
+		t.Error(err)
+	}
+	// Unbinds must come back in the reverse order of the binds.
+	expectedList = []string{
+		path.Join(wsTopDir, "project"),
+		path.Join(wsTopDir, ".repo"),
+	}
+	if !reflect.DeepEqual(expectedList, list) {
+		t.Errorf("Expected a list of unbinds %v and got %v", expectedList, list)
+	}
+}
+
+// projectListContents is deliberately out of order to exercise the
+// alphabetical sorting performed by Compose.
+const projectListContents = `project2/subprojectb
+project1
+project2
+project1/subprojecta`
+
+// TestBindOrder checks that parent projects are bound before nested
+// child projects and unbound in the reverse order.
+func TestBindOrder(t *testing.T) {
+	var err error
+	var codebaseDir string
+	if codebaseDir, err = ioutil.TempDir("", "codebase"); err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	project2Git := path.Join(codebaseDir, "project2", ".git")
+	if err = os.MkdirAll(project2Git, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	project1Git := path.Join(codebaseDir, "project1", ".git")
+	if err = os.MkdirAll(project1Git, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	subProjectAGit := path.Join(codebaseDir, "project1", "subprojecta", ".git")
+	if err = os.MkdirAll(subProjectAGit, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	subProjectBGit := path.Join(codebaseDir, "project2", "subprojectb", ".git")
+	if err = os.MkdirAll(subProjectBGit, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte(projectListContents)
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	var wsTempDir string
+	if wsTempDir, err = ioutil.TempDir("", "workspace"); err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	fakePathBinder := bind.NewFakePathBinder()
+	wsComposer := NewComposer(fakePathBinder)
+	bindList, err := wsComposer.Compose(codebaseDir, wsTopDir)
+	if err != nil {
+		t.Error(err)
+	}
+	expectedList := []string{
+		path.Join(wsTopDir, ".repo"),
+		path.Join(wsTopDir, "project1"),
+		path.Join(wsTopDir, "project1", "subprojecta"),
+		path.Join(wsTopDir, "project2"),
+		path.Join(wsTopDir, "project2", "subprojectb"),
+	}
+	if !reflect.DeepEqual(expectedList, bindList) {
+		t.Errorf("Expected a list of binds %v and got %v", expectedList, bindList)
+	}
+	unbindList, err := wsComposer.Dismantle(wsTopDir)
+	if err != nil {
+		t.Error(err)
+	}
+	expectedList = []string{
+		path.Join(wsTopDir, "project2", "subprojectb"),
+		path.Join(wsTopDir, "project2"),
+		path.Join(wsTopDir, "project1", "subprojecta"),
+		path.Join(wsTopDir, "project1"),
+		path.Join(wsTopDir, ".repo"),
+	}
+	if !reflect.DeepEqual(expectedList, unbindList) {
+		t.Errorf("Expected a list of unbinds %v and got %v", expectedList, unbindList)
+	}
+}
diff --git a/hacksaw/workspace/copier.go b/hacksaw/workspace/copier.go
new file mode 100644
index 0000000..f90edc8
--- /dev/null
+++ b/hacksaw/workspace/copier.go
@@ -0,0 +1,241 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workspace
+
+import (
+	"fmt"
+	"io"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"strings"
+)
+
+// FileCopier copies the parts of a codebase that do not belong to any
+// git project into a workspace.
+type FileCopier struct {
+}
+
+// NewFileCopier returns a new FileCopier.
+func NewFileCopier() *FileCopier {
+	var f FileCopier
+	return &f
+}
+
+// GetIsGitProjectFunc returns a predicate that reports whether a path
+// under codebaseDir is itself one of gitProjects.
+func (f FileCopier) GetIsGitProjectFunc(codebaseDir string, gitProjects []string) func(string) (bool, error) {
+	// Convert the git project list to a set to speed up lookups
+	gitProjectSet := make(map[string]struct{})
+	var exists = struct{}{}
+	for _, project := range gitProjects {
+		gitProjectSet[project] = exists
+	}
+
+	return func(pathToCheck string) (bool, error) {
+		var err error
+		// Lookups are keyed by path relative to the codebase root.
+		if pathToCheck, err = filepath.Rel(codebaseDir, pathToCheck); err != nil {
+			return false, err
+		}
+		if _, ok := gitProjectSet[pathToCheck]; ok {
+			return true, err
+		}
+		return false, err
+	}
+}
+
+// GetContainsGitProjectFunc returns a predicate that reports whether a
+// path under codebaseDir is an ancestor directory of any of gitProjects.
+func (f FileCopier) GetContainsGitProjectFunc(codebaseDir string, gitProjects []string) func(string) (bool, error) {
+	// Extract the set of dirs that contain git projects
+	containsGitSet := make(map[string]struct{})
+	var exists = struct{}{}
+	for _, project := range gitProjects {
+		// Walk each project path upwards, recording every ancestor.
+		for dir := project; dir != "." && dir != "/"; dir = filepath.Dir(dir) {
+			containsGitSet[dir] = exists
+		}
+	}
+
+	return func(pathToCheck string) (bool, error) {
+		var err error
+		if pathToCheck, err = filepath.Rel(codebaseDir, pathToCheck); err != nil {
+			return false, err
+		}
+		if _, ok := containsGitSet[pathToCheck]; ok {
+			return true, err
+		}
+		return false, err
+	}
+}
+
+// Copy replicates into workspaceDir everything under codebaseDir that
+// is not part of a git project; gitProjects paths are relative to
+// codebaseDir.
+func (f FileCopier) Copy(codebaseDir string, gitProjects []string, workspaceDir string) error {
+	isGitProject := f.GetIsGitProjectFunc(codebaseDir, gitProjects)
+	containsGitProject := f.GetContainsGitProjectFunc(codebaseDir, gitProjects)
+
+	return filepath.Walk(codebaseDir,
+		func(path string, info os.FileInfo, err error) error {
+			if err != nil {
+				return err
+			}
+
+			// Copy files
+			if !info.IsDir() {
+				return f.CopyNode(info, codebaseDir, path, workspaceDir)
+			}
+
+			// Never copy the codebase root itself.
+			if path == filepath.Clean(codebaseDir) {
+				return nil
+			}
+
+			// Always skip traversal of root repo directories
+			if path == filepath.Join(codebaseDir, ".repo") {
+				return filepath.SkipDir
+			}
+
+			// Skip all git projects (they are bind-mounted, not copied)
+			var isGitProj bool
+			if isGitProj, err = isGitProject(path); err != nil {
+				return err
+			}
+			if isGitProj {
+				return filepath.SkipDir
+			}
+
+			// Copy over files
+			var containsGitProj bool
+			if containsGitProj, err = containsGitProject(path); err != nil {
+				return err
+			}
+			if !containsGitProj {
+				// A dir with no git projects anywhere below it can
+				// be copied wholesale and its traversal skipped.
+				destPath, err := f.GetDestPath(codebaseDir, path, workspaceDir)
+				if err != nil {
+					return err
+				}
+				if err = f.CopyDirRecursive(info, path, destPath); err != nil {
+					return err
+				}
+				return filepath.SkipDir
+			}
+			return f.CopyNode(info, codebaseDir, path, workspaceDir)
+		})
+}
+
+// GetDestPath translates sourcePath (which must be contained in
+// codebaseDir) into the corresponding path under workspaceDir.
+func (f FileCopier) GetDestPath(codebaseDir, sourcePath, workspaceDir string) (string, error) {
+	// The appended "/" avoids matching a sibling dir that merely
+	// shares codebaseDir as a string prefix.
+	if !strings.HasPrefix(sourcePath+"/", codebaseDir+"/") {
+		return "", fmt.Errorf("%s is not contained in %s", sourcePath, codebaseDir)
+	}
+	relPath, err := filepath.Rel(codebaseDir, sourcePath)
+	if err != nil {
+		return "", err
+	}
+	destPath := filepath.Join(workspaceDir, relPath)
+	return destPath, err
+}
+
+// CopyNode copies any single file, symlink or dir non-recursively,
+// dispatching on the node type.
+// sourcePath must be contained in codebaseDir.
+func (f FileCopier) CopyNode(sourceInfo os.FileInfo, codebaseDir, sourcePath, workspaceDir string) error {
+	destPath, err := f.GetDestPath(codebaseDir, sourcePath, workspaceDir)
+	if err != nil {
+		return err
+	}
+	switch {
+	case sourceInfo.Mode()&os.ModeSymlink == os.ModeSymlink:
+		return f.CopySymlink(sourcePath, destPath)
+	case sourceInfo.Mode().IsDir():
+		return f.CopyDirOnly(sourceInfo, destPath)
+	default:
+		return f.CopyFile(sourceInfo, sourcePath, destPath)
+	}
+}
+
+// CopySymlink recreates the symlink at sourcePath as destPath with the
+// same (verbatim) link target.
+func (f FileCopier) CopySymlink(sourcePath string, destPath string) error {
+	// Skip symlink if it already exists at the destination
+	_, err := os.Lstat(destPath)
+	if err == nil {
+		return nil
+	}
+
+	target, err := os.Readlink(sourcePath)
+	if err != nil {
+		return err
+	}
+
+	// NOTE(review): the target is copied verbatim, so an absolute
+	// target still points into the codebase — confirm this is intended.
+	return os.Symlink(target, destPath)
+}
+
+// CopyDirOnly copies a directory non-recursively: it creates destPath
+// with the source's permission bits unless it already exists.
+// sourcePath must be contained in codebaseDir.
+func (f FileCopier) CopyDirOnly(sourceInfo os.FileInfo, destPath string) error {
+	_, err := os.Stat(destPath)
+	switch {
+	case err == nil:
+		// Dir already exists, nothing to do. Return nil explicitly
+		// rather than the (necessarily nil) stale err.
+		return nil
+	case os.IsNotExist(err):
+		return os.Mkdir(destPath, sourceInfo.Mode())
+	default:
+		return err
+	}
+}
+
+// CopyFile copies a single file, preserving its permission bits.
+// Existing destination files are left untouched.
+// sourcePath must be contained in codebaseDir.
+func (f FileCopier) CopyFile(sourceInfo os.FileInfo, sourcePath, destPath string) error {
+	// Skip file if it already exists at the destination
+	_, err := os.Lstat(destPath)
+	if err == nil {
+		return nil
+	}
+
+	sourceFile, err := os.Open(sourcePath)
+	if err != nil {
+		return err
+	}
+	defer sourceFile.Close()
+
+	destFile, err := os.Create(destPath)
+	if err != nil {
+		return err
+	}
+	defer destFile.Close()
+
+	_, err = io.Copy(destFile, sourceFile)
+	if err != nil {
+		return err
+	}
+	// Propagate the source's permission bits to the copy.
+	return os.Chmod(destPath, sourceInfo.Mode())
+}
+
+// CopyDirRecursive copies the directory tree rooted at sourcePath to
+// destPath, preserving permission bits and recreating symlinks.
+func (f FileCopier) CopyDirRecursive(sourceInfo os.FileInfo, sourcePath, destPath string) error {
+	if err := f.CopyDirOnly(sourceInfo, destPath); err != nil {
+		return err
+	}
+	childNodes, err := ioutil.ReadDir(sourcePath)
+	if err != nil {
+		return err
+	}
+	for _, childInfo := range childNodes {
+		childSourcePath := filepath.Join(sourcePath, childInfo.Name())
+		childDestPath := filepath.Join(destPath, childInfo.Name())
+		switch {
+		case childInfo.Mode()&os.ModeSymlink == os.ModeSymlink:
+			if err = f.CopySymlink(childSourcePath, childDestPath); err != nil {
+				return err
+			}
+		case childInfo.Mode().IsDir():
+			if err = f.CopyDirRecursive(childInfo, childSourcePath, childDestPath); err != nil {
+				return err
+			}
+		default:
+			if err = f.CopyFile(childInfo, childSourcePath, childDestPath); err != nil {
+				return err
+			}
+		}
+	}
+	// err is necessarily nil here; the original returned the stale
+	// err variable, which obscured that fact.
+	return nil
+}
diff --git a/hacksaw/workspace/copier_test.go b/hacksaw/workspace/copier_test.go
new file mode 100644
index 0000000..134f64e
--- /dev/null
+++ b/hacksaw/workspace/copier_test.go
@@ -0,0 +1,136 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workspace
+
+import (
+	"io/ioutil"
+	"os"
+	"path"
+	"testing"
+)
+
+// TestCopy builds a small codebase tree and verifies that Copy
+// replicates non-project files into the workspace while leaving git
+// project directories out.
+func TestCopy(t *testing.T) {
+	// Setup codebase
+	codebaseDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	project1GitDir := path.Join(codebaseDir, "project1", ".git")
+	if err = os.MkdirAll(project1GitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	emptyBytes := []byte{}
+	project1File := path.Join(codebaseDir, "project1", "projectfile")
+	if err = ioutil.WriteFile(project1File, emptyBytes, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	symlink := path.Join(codebaseDir, "symlink")
+	if err = os.Symlink(path.Join(codebaseDir, "project1"), symlink); err != nil {
+		t.Error(err)
+	}
+	project2GitDir := path.Join(codebaseDir, "dirwithprojects", "project2", ".git")
+	if err = os.MkdirAll(project2GitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	dirWithoutProjects := path.Join(codebaseDir, "dirwithoutprojects")
+	if err = os.Mkdir(dirWithoutProjects, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	projectSiblingFile := path.Join(codebaseDir, "dirwithprojects", "projectsiblingfile")
+	if err = ioutil.WriteFile(projectSiblingFile, emptyBytes, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	noProjectFile1 := path.Join(dirWithoutProjects, "noprojectfile1")
+	if err = ioutil.WriteFile(noProjectFile1, emptyBytes, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	noProjectFile2 := path.Join(dirWithoutProjects, "noprojectfile2")
+	if err = ioutil.WriteFile(noProjectFile2, emptyBytes, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	topFile := path.Join(codebaseDir, "topfile")
+	if err = ioutil.WriteFile(topFile, emptyBytes, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	gitProjects := []string{
+		"project1",
+		"dirwithprojects/project2",
+	}
+
+	// Set up workspace
+	workspaceDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	// This dir may already exist if the projects have been mounted
+	wsDirWithProjects := path.Join(workspaceDir, "dirwithprojects")
+	if err = os.Mkdir(wsDirWithProjects, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+
+	copier := NewFileCopier()
+	if err = copier.Copy(codebaseDir, gitProjects, workspaceDir); err != nil {
+		t.Error(err)
+	}
+
+	// Non-project files must all have been copied over.
+	wsTopFile := path.Join(workspaceDir, "topfile")
+	_, err = os.Stat(wsTopFile)
+	if err != nil {
+		t.Error(err)
+	}
+	wsNoProjectFile1 := path.Join(workspaceDir, "dirwithoutprojects", "noprojectfile1")
+	_, err = os.Stat(wsNoProjectFile1)
+	if err != nil {
+		t.Error(err)
+	}
+	wsNoProjectFile2 := path.Join(workspaceDir, "dirwithoutprojects", "noprojectfile2")
+	_, err = os.Stat(wsNoProjectFile2)
+	if err != nil {
+		t.Error(err)
+	}
+	wsProjectSiblingFile := path.Join(workspaceDir, "dirwithprojects", "projectsiblingfile")
+	_, err = os.Stat(wsProjectSiblingFile)
+	if err != nil {
+		t.Error(err)
+	}
+	wsSymlink := path.Join(workspaceDir, "symlink")
+	_, err = os.Stat(wsSymlink)
+	if err != nil {
+		t.Error(err)
+	}
+	//TODO: check why this is failing
+	//	if linkInfo.Mode() & os.ModeSymlink != os.ModeSymlink {
+	//		t.Error("Symlink not copied as symlink")
+	//	}
+	// Git project dirs must NOT have been copied.
+	wsProject2Dir := path.Join(workspaceDir, "dirwithprojects", "project2")
+	_, err = os.Stat(wsProject2Dir)
+	if err == nil {
+		t.Error("Project2 mistakenly copied")
+	} else if os.IsNotExist(err) {
+		// This error is expected
+	} else {
+		t.Error(err)
+	}
+	wsProject1Dir := path.Join(workspaceDir, "project1")
+	_, err = os.Stat(wsProject1Dir)
+	if err == nil {
+		t.Error("Project1 mistakenly copied")
+	} else if os.IsNotExist(err) {
+		// This error is expected
+	} else {
+		t.Error(err)
+	}
+}
diff --git a/hacksaw/workspace/workspace.go b/hacksaw/workspace/workspace.go
new file mode 100644
index 0000000..43fbee6
--- /dev/null
+++ b/hacksaw/workspace/workspace.go
@@ -0,0 +1,329 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package workspace lets you manage workspaces
+package workspace
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"strings"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/codebase"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/config"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/git"
+)
+
+type Workspace struct {
+	composer Composer
+	topDir   string
+}
+
+func New(bm bind.PathBinder, topDir string) Workspace {
+	return Workspace{NewComposer(bm), topDir}
+}
+
+// Create workspace
+func (w Workspace) Create(workspaceName string, codebaseName string) (string, error) {
+	cfg := config.GetConfig()
+	_, ok := cfg.Codebases[codebaseName]
+	if !ok {
+		return "", fmt.Errorf("Codebase %s does not exist", codebaseName)
+	}
+	if _, ok := cfg.Workspaces[workspaceName]; ok {
+		return "", fmt.Errorf("Workspace %s already exists", workspaceName)
+	}
+	cfg.Workspaces[workspaceName] = codebaseName
+	workspaceDir, err := w.GetDir(workspaceName)
+	if err != nil {
+		return "", err
+	}
+	if err = os.MkdirAll(workspaceDir, os.ModePerm); err != nil {
+		return "", err
+	}
+	codebaseDir, err := codebase.GetDir(codebaseName)
+	if err != nil {
+		return "", err
+	}
+	//TODO: match the order of parameters with Create
+	if _, err = w.composer.Compose(codebaseDir, workspaceDir); err != nil {
+		return "", err
+	}
+	return workspaceDir, nil
+}
+
+// Recreate workspace
+func (w Workspace) Recreate(workspaceName string) (string, error) {
+	cfg := config.GetConfig()
+	codebaseName, ok := cfg.Workspaces[workspaceName]
+	if !ok {
+		return "", fmt.Errorf("Workspace %s does not exist", workspaceName)
+	}
+	workspaceDir, err := w.GetDir(workspaceName)
+	if err != nil {
+		return "", err
+	}
+	codebaseDir, err := codebase.GetDir(codebaseName)
+	if err != nil {
+		return "", err
+	}
+	if _, err = w.composer.Compose(codebaseDir, workspaceDir); err != nil {
+		return "", err
+	}
+	return workspaceDir, nil
+}
+
+// GetDir retrieves the directory of a specific workspace
+func (w Workspace) GetDir(workspaceName string) (string, error) {
+	cfg := config.GetConfig()
+	_, ok := cfg.Workspaces[workspaceName]
+	if !ok {
+		return "", fmt.Errorf("Workspace %s not found", workspaceName)
+	}
+	dir := filepath.Join(w.topDir, workspaceName)
+	return dir, nil
+}
+
+// GetCodebase retrieves the codebase that a workspace belongs to
+func (w Workspace) GetCodebase(workspaceName string) (string, error) {
+	cfg := config.GetConfig()
+	codebase, ok := cfg.Workspaces[workspaceName]
+	if !ok {
+		return "", fmt.Errorf("Workspace %s not found", workspaceName)
+	}
+	return codebase, nil
+}
+
+// SetTopDir sets the directory that contains all workspaces
+func (w *Workspace) SetTopDir(dir string) {
+	w.topDir = dir
+}
+
+func (w Workspace) List() map[string]string {
+	cfg := config.GetConfig()
+	list := make(map[string]string)
+	for name, codebaseName := range cfg.Workspaces {
+		list[name] = codebaseName
+	}
+	return list
+}
+
+func (w Workspace) DetachGitWorktrees(workspaceName string, unbindList []string) error {
+	workspaceDir, err := w.GetDir(workspaceName)
+	if err != nil {
+		return err
+	}
+	workspaceDir, err = filepath.Abs(workspaceDir)
+	if err != nil {
+		return err
+	}
+	//resolve all symlinks so the workspace path can be
+	//matched to mount paths
+	workspaceDir, err = filepath.EvalSymlinks(workspaceDir)
+	if err != nil {
+		return err
+	}
+	codebaseName, err := w.GetCodebase(workspaceName)
+	if err != nil {
+		return err
+	}
+	codebaseDir, err := codebase.GetDir(codebaseName)
+	if err != nil {
+		return err
+	}
+	lister := git.NewRepoLister()
+	gitProjects, err := lister.List(codebaseDir)
+	if err != nil {
+		return err
+	}
+	gitWorktrees := make(map[string]bool)
+	for _, project := range gitProjects {
+		gitWorktrees[project] = true
+	}
+	//projects that were unbound were definitely
+	//never git worktrees
+	for _, unbindPath := range unbindList {
+		project, err := filepath.Rel(workspaceDir, unbindPath)
+		if err != nil {
+			return err
+		}
+		if _, ok := gitWorktrees[project]; ok {
+			gitWorktrees[project] = false
+		}
+	}
+	for project, isWorktree := range gitWorktrees {
+		if !isWorktree {
+			continue
+		}
+		codebaseProject := filepath.Join(codebaseDir, project)
+		workspaceProject := filepath.Join(workspaceDir, project)
+		_, err = os.Stat(workspaceProject)
+		if err == nil {
+			//proceed to detach
+		} else if os.IsNotExist(err) {
+			//just skip if it doesn't exist
+			continue
+		} else {
+			return err
+		}
+		contents, err := ioutil.ReadDir(workspaceProject)
+		if err != nil {
+			return err
+		}
+		if len(contents) == 0 {
+			//empty directory, not even a .git
+			//not a worktree
+			continue
+		}
+		fmt.Print(".")
+		cmd := exec.Command("git",
+			"-C", codebaseProject,
+			"worktree", "remove", "--force", workspaceProject)
+		output, err := cmd.CombinedOutput()
+		if err != nil {
+			return fmt.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+				cmd.String(), err.Error(), output)
+		}
+		cmd = exec.Command("git",
+			"-C", codebaseProject,
+			"branch", "--delete", "--force", workspaceName)
+		output, err = cmd.CombinedOutput()
+		if err != nil {
+			return fmt.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+				cmd.String(), err.Error(), output)
+		}
+	}
+	return nil
+}
+
+func (w Workspace) Remove(remove string) (*config.Config, error) {
+	cfg := config.GetConfig()
+	_, ok := cfg.Workspaces[remove]
+	if !ok {
+		return cfg, fmt.Errorf("Workspace %s not found", remove)
+	}
+	workspaceDir, err := w.GetDir(remove)
+	if err != nil {
+		return cfg, err
+	}
+	unbindList, err := w.composer.Dismantle(workspaceDir)
+	if err != nil {
+		return cfg, err
+	}
+	fmt.Print("Detaching worktrees")
+	if err = w.DetachGitWorktrees(remove, unbindList); err != nil {
+		return cfg, err
+	}
+	fmt.Print("\n")
+	fmt.Println("Removing files")
+	if err = os.RemoveAll(workspaceDir); err != nil {
+		return cfg, err
+	}
+	delete(cfg.Workspaces, remove)
+	return cfg, err
+}
+
+func (w Workspace) Edit(editPath string) (string, string, error) {
+	editPath, err := filepath.Abs(editPath)
+	if err != nil {
+		return "", "", err
+	}
+	editPath, err = filepath.EvalSymlinks(editPath)
+	if err != nil {
+		return "", "", err
+	}
+	relProjectPath, err := w.getReadOnlyProjectFromPath(editPath)
+	if err != nil {
+		return "", "", err
+	}
+	workspaceName, err := w.getWorkspaceFromPath(editPath)
+	if err != nil {
+		return "", "", err
+	}
+	workspaceDir, err := w.GetDir(workspaceName)
+	if err != nil {
+		return "", "", err
+	}
+	codebaseName, err := w.GetCodebase(workspaceName)
+	if err != nil {
+		return "", "", err
+	}
+	codebaseDir, err := codebase.GetDir(codebaseName)
+	if err != nil {
+		return "", "", err
+	}
+	wsProjectPath := filepath.Join(workspaceDir, relProjectPath)
+	if err = w.composer.Unbind(wsProjectPath); err != nil {
+		return "", "", err
+	}
+	//TODO: support editing nested projects
+	//the command above unbinds nested child projects but
+	//we don't rebind them after checking out an editable project branch
+	cbProjectPath := filepath.Join(codebaseDir, relProjectPath)
+	branchName := workspaceName
+	cmd := exec.Command("git",
+		"-C", cbProjectPath,
+		"worktree", "add",
+		"-b", branchName,
+		wsProjectPath)
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		return "", "", fmt.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	return branchName, wsProjectPath, err
+}
+
+func (w Workspace) getReadOnlyProjectFromPath(inPath string) (string, error) {
+	worspaceName, err := w.getWorkspaceFromPath(inPath)
+	if err != nil {
+		return "", err
+	}
+	workspacePath, err := w.GetDir(worspaceName)
+	if err != nil {
+		return "", err
+	}
+	bindList, err := w.composer.List(workspacePath)
+	if err != nil {
+		return "", err
+	}
+	for _, bindPath := range bindList {
+		if !strings.HasPrefix(inPath+"/", bindPath+"/") {
+			continue
+		}
+		relProjectPath, err := filepath.Rel(workspacePath, bindPath)
+		if err != nil {
+			return "", err
+		}
+		return relProjectPath, nil
+	}
+	return "", fmt.Errorf("Path %s is already editable", inPath)
+}
+
+func (w Workspace) getWorkspaceFromPath(inPath string) (string, error) {
+	for workspaceName, _ := range w.List() {
+		dir, err := w.GetDir(workspaceName)
+		if err != nil {
+			return "", err
+		}
+		if strings.HasPrefix(inPath+"/", dir+"/") {
+			return workspaceName, nil
+		}
+	}
+	return "", fmt.Errorf("Path %s is not contained in a workspace", inPath)
+}
diff --git a/hacksaw/workspace/workspace_test.go b/hacksaw/workspace/workspace_test.go
new file mode 100644
index 0000000..c4476ae
--- /dev/null
+++ b/hacksaw/workspace/workspace_test.go
@@ -0,0 +1,457 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     https://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package workspace
+
+import (
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path"
+	"testing"
+
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/bind"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/codebase"
+	"android.googlesource.com/platform/tools/treble.git/hacksaw/config"
+)
+
+func TestBasicCreate(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	// The top dir must be named "hacksaw"
+	// otherwise the mounters will reject any
+	// mount requests
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	ws := New(bind.NewFakePathBinder(), wsTopDir)
+	if _, err = ws.Create("test-workspace", "test-codebase"); err != nil {
+		t.Error(err)
+	}
+	workspaceDir, err := ws.GetDir("test-workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	_, err = os.Stat(workspaceDir)
+	if err != nil {
+		t.Error(err)
+	}
+}
+
+func TestWorkspaceDuplicate(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	// The top dir must be named "hacksaw"
+	// otherwise the mounters will reject any
+	// mount requests
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	ws := New(bind.NewFakePathBinder(), wsTopDir)
+	_, err = ws.Create("test-workspace", "test-codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	_, err = ws.Create("test-workspace", "test-codebase")
+	if err == nil {
+		t.Error("Allowed workspace duplicate")
+	}
+}
+
+func TestCreateWorkspaceFromBadCodebase(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	ws := New(bind.NewFakePathBinder(), wsTopDir)
+	if _, err = ws.Create("test-workspace", "does-not-exist"); err == nil {
+		t.Error("Allowed bad codebase")
+	}
+}
+
+func TestList(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	ws := New(bind.NewFakePathBinder(), wsTopDir)
+	if _, err = ws.Create("test-workspace", "test-codebase"); err != nil {
+		t.Error(err)
+	}
+	list := ws.List()
+	cb, ok := list["test-workspace"]
+	if !ok || cb != "test-codebase" {
+		t.Error("Added workspace not listed")
+	}
+}
+
+func TestRemove(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "test")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	gitDir := path.Join(codebaseDir, "project", ".git")
+	if err = os.MkdirAll(gitDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	ws := New(bind.NewFakePathBinder(), wsTopDir)
+	if _, err = ws.Create("test-workspace", "test-codebase"); err != nil {
+		t.Error(err)
+	}
+	workspaceDir, err := ws.GetDir("test-workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	_, err = os.Stat(workspaceDir)
+	if err != nil {
+		t.Error(err)
+	}
+	cfg, err := ws.Remove("test-workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	_, ok := cfg.Workspaces["test-codebase"]
+	if ok {
+		t.Error("Removed workspace test-codebase is still in the configuration")
+	}
+	_, err = os.Stat(workspaceDir)
+	if err == nil {
+		t.Error("Workspace test-workspace was removed but its directory remains")
+	} else if os.IsNotExist(err) {
+		// This is the expected error
+	} else {
+		t.Error(err)
+	}
+}
+
+func TestEdit(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	projectDir := path.Join(codebaseDir, "project")
+	if err = os.MkdirAll(projectDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := exec.Command("git", "-C", projectDir, "init")
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	cmd = exec.Command("git", "-C", projectDir, "commit", `--message="Initial commit"`, "--allow-empty")
+	output, err = cmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte("project")
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	ws := New(bind.NewFakePathBinder(), wsTopDir)
+	if _, err = ws.Create("test-workspace", "test-codebase"); err != nil {
+		t.Error(err)
+	}
+	workspaceDir, err := ws.GetDir("test-workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	_, err = os.Stat(workspaceDir)
+	if err != nil {
+		t.Error(err)
+	}
+	editPath := path.Join(workspaceDir, "project")
+	branchName, wsProjectDir, err := ws.Edit(editPath)
+	if err != nil {
+		t.Error(err)
+	}
+	if branchName == "" {
+		t.Error("Editing returned an empty branch")
+	}
+	if wsProjectDir == "" {
+		t.Error("Editing returned an empty project path")
+	}
+	cmd = exec.Command("git", "-C", wsProjectDir, "show", branchName)
+	output, err = cmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	//Recreate workspace and try editing again
+	_, err = ws.Remove("test-workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	_, err = ws.Create("test-workspace", "test-codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	_, _, err = ws.Edit(editPath)
+	if err != nil {
+		t.Error(err)
+	}
+}
+
+const projectList = `read-only-project
+editable-project`
+
+func TestRecreate(t *testing.T) {
+	defer config.GetConfig().Reset()
+	codebaseDir, err := ioutil.TempDir("", "codebase")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(codebaseDir)
+	roProjectDir := path.Join(codebaseDir, "read-only-project")
+	if err = os.MkdirAll(roProjectDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd := exec.Command("git", "-C", roProjectDir, "init")
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	cmd = exec.Command("git", "-C", roProjectDir, "commit", `--message="Initial commit"`, "--allow-empty")
+	output, err = cmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	linkPath := path.Join(codebaseDir, "symlink")
+	if err = os.Symlink(roProjectDir, linkPath); err != nil {
+		t.Error(err)
+	}
+	rwProjectDir := path.Join(codebaseDir, "editable-project")
+	if err = os.MkdirAll(rwProjectDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	cmd = exec.Command("git", "-C", rwProjectDir, "init")
+	output, err = cmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	cmd = exec.Command("git", "-C", rwProjectDir, "commit", `--message="Initial commit"`, "--allow-empty")
+	output, err = cmd.CombinedOutput()
+	if err != nil {
+		t.Errorf("Command\n%s\nfailed with the following:\n%s\n%s",
+			cmd.String(), err.Error(), output)
+	}
+	repoDir := path.Join(codebaseDir, ".repo")
+	if err = os.Mkdir(repoDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	listContents := []byte(projectList)
+	listPath := path.Join(repoDir, "project.list")
+	if err = ioutil.WriteFile(listPath, listContents, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	_, err = codebase.Add("test-codebase", codebaseDir)
+	if err != nil {
+		t.Error(err)
+	}
+	wsTempDir, err := ioutil.TempDir("", "workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	defer os.RemoveAll(wsTempDir)
+	wsTopDir := path.Join(wsTempDir, "hacksaw")
+	if err = os.Mkdir(wsTopDir, os.ModePerm); err != nil {
+		t.Error(err)
+	}
+	pathBinder := bind.NewFakePathBinder()
+	ws := New(pathBinder, wsTopDir)
+	if _, err = ws.Create("test-workspace", "test-codebase"); err != nil {
+		t.Error(err)
+	}
+	workspaceDir, err := ws.GetDir("test-workspace")
+	if err != nil {
+		t.Error(err)
+	}
+	editPath := path.Join(workspaceDir, "editable-project")
+	_, _, err = ws.Edit(editPath)
+	if err != nil {
+		t.Error(err)
+	}
+	emptyFilePath := path.Join(editPath, "empty-edit")
+	emptyFile, err := os.Create(emptyFilePath)
+	if err != nil {
+		t.Error(err)
+	}
+	emptyFile.Close()
+	if _, err = ws.Recreate("test-workspace"); err != nil {
+		t.Error(err)
+	}
+	_, err = os.Stat(emptyFilePath)
+	if err != nil {
+		t.Error(err)
+	}
+	wsRoProjectDir := path.Join(workspaceDir, "read-only-project")
+	isRoPathBound := false
+	pathList, err := pathBinder.List()
+	if err != nil {
+		t.Error(err)
+	}
+	for _, path := range pathList {
+		if path == wsRoProjectDir {
+			isRoPathBound = true
+		}
+	}
+	if !isRoPathBound {
+		t.Error("Read only project was not mounted to the workspace")
+	}
+}
diff --git a/split/Android.bp b/split/Android.bp
index 8b0c932..331354b 100644
--- a/split/Android.bp
+++ b/split/Android.bp
@@ -12,9 +12,18 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "tools_treble_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["tools_treble_license"],
+}
+
 python_defaults {
     name: "treble_split_default",
-    pkg_path: "treble",
+    pkg_path: "treble/split",
     version: {
         py2: {
             enabled: false,
@@ -38,6 +47,31 @@
     data: [
         "default_config.xml",
     ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+}
+
+python_library_host {
+    name: "treble_xml_diff",
+    defaults: ["treble_split_default"],
+    srcs: [
+        "xml_diff.py",
+    ],
+}
+
+python_binary_host {
+    name: "treble_manifest_diff",
+    main: "manifest_diff.py",
+    defaults: ["treble_split_default"],
+    srcs: [
+        "manifest_diff.py",
+    ],
+    libs: [
+        "treble_xml_diff",
+    ],
 }
 
 python_test_host {
@@ -45,9 +79,13 @@
     main: "test.py",
     defaults: ["treble_split_default"],
     srcs: [
+        "manifest_diff.py",
+        "manifest_diff_test.py",
         "manifest_split.py",
         "manifest_split_test.py",
         "test.py",
+        "xml_diff.py",
+        "xml_diff_test.py",
     ],
     libs: [
         "py-mock",
diff --git a/split/README.md b/split/README.md
new file mode 100644
index 0000000..db3e7de
--- /dev/null
+++ b/split/README.md
@@ -0,0 +1,107 @@
+# Manifest Split
+
+## Overview
+
+Split manifests are Android repo manifests that contain the minimum set of
+projects necessary to build a given target. If a project isn't used for building
+the target, it shouldn't be in the split manifest. This smaller manifest can be
+used to sync the Android source tree and build the specific target. This sync
+should be faster and smaller than a sync of a full manifest because it is
+syncing fewer projects.
+
+The `treble_manifest_split` tool is used to automatically create a split
+manifest from a full manifest using dependency information from the source tree
+and the build outputs. The tool attempts to infer as many dependencies as it
+can, but some will be missed due to implicit dependencies in the build system
+and source tree. This is solved by manually fine-tuning a tool configuration XML
+specific to your target.
+
+## Example for aosp_arm64
+
+### 1. Run a full build using a full manifest
+
+The `treble_manifest_split` tool needs the ninja build graph and deps log from a
+completed build in order to have a full view of the dependency graph. While the
+build graph is created at the beginning of a ninja build, the deps log is not
+complete until the build finishes.
+
+Use standard Android build commands to build your target.
+
+### 2. Use the treble_manifest_split tool
+
+```shell
+# Change to the directory where you ran the full build.
+cd /path/to/android
+
+# Set command line variables for the Android target you are using and the build
+# target that should be buildable from your split manifest.
+ANDROID_TARGET=aosp_arm64-userdebug
+BUILD_TARGET=droid
+
+# Build treble_manifest_split as a python binary.
+lunch $ANDROID_TARGET
+m treble_manifest_split
+
+# Create the split manifest using a sample config XML specific to aosp_arm64.
+out/host/linux-x86/bin/treble_manifest_split \
+  --manifest .repo/manifests/default.xml \
+  --split-manifest split_default.xml \
+  --debug-file debug.json \
+  --config tools/treble/split/sample_config.xml \
+  $BUILD_TARGET
+```
+
+### 3. Build using the split manifest
+
+You should test that the split manifest created by the tool can be used to build
+the partial target files package.
+
+1.  Initialize a new repo directory using the steps in
+    https://source.android.com/setup/build/downloading#initializing-a-repo-client.
+1.  Replace the `.repo/manifests/default.xml` full manifest with the
+    newly-generated split manifest.
+1.  Use standard `repo sync` commands to sync your repo.
+1.  Attempt a build of your target.
+
+### 4. Fix build errors
+
+Build errors may arise due to missing dependencies that were previously provided
+by now-removed projects. These dependencies may be implicit in the source code,
+or an explicit dependency type that is not yet able to be automatically detected
+by the tool.
+
+1.  Find the dependency source project in your full-manifest repo directory.
+1.  Update your config XML to manually add projects to your split manifest.
+
+    -   For example, the following line in `sample_config.xml` in this tool
+        directory specifies a project that should be included in the split
+        manifest even if the tool doesn't automatically detect that it is
+        necessary.
+
+    ```
+        <add_project name="platform/external/python/cpython3" />
+    ```
+
+1.  Regenerate the split manifest using `treble_manifest_split` in your
+    full-manifest directory. Remember to pass the path of your config XML to the
+    script's `--config` flag.
+
+### 5. Compare built artifacts
+
+A successful build alone is not sufficient to have full confidence in the split
+manifest. You should diff the output artifacts of the split-manifest build
+against the output artifacts of the full-manifest build.
+
+Suggestions for viewing diffs:
+
+-   Use an external directory diffing tool on the output directories for each
+    partition, such as `out/target/product/<device>/system`.
+-   Use `development/vndk/tools/image-diff-tool/diff.py` on output directories,
+    or on a zipped target-files archive if you are creating `dist` builds.
+
+The following may cause differences between output artifacts:
+
+-   Non-hermetic inputs used in the module build rule, such as timestamps. Can
+    be fixed by removing the timestamp from the build rule.
+-   An implicit and optional source dependency. Can be fixed by manually adding
+    the project that defines the missing source.
diff --git a/split/default_config.xml b/split/default_config.xml
index 478bf9b..f9745b6 100644
--- a/split/default_config.xml
+++ b/split/default_config.xml
@@ -1,25 +1,8 @@
 <?xml version="1.0" encoding="utf-8"?>
 <config>
-    <!-- Needed for repo tool -->
+    <!-- ============================================================= -->
+    <!-- Needed for the repo tool. -->
     <add_project name="platform/tools/repohooks" />
-    <!-- Needed for early build of Soong -->
-    <add_project name="platform/external/golang-protobuf" />
-    <!-- system/iorap: iorap.inode2filename depends on librxcpp -->
-    <add_project name="platform/external/Reactive-Extensions/RxCpp" />
-    <!-- system/bt/gd: bluetooth_packets_python3 depends on pybind11_headers -->
-    <add_project name="platform/external/python/pybind11" />
-    <!-- test/vts/tools/build/tasks/vts_package.mk runs 'cd' on these dirs -->
-    <add_project name="platform/test/vts-testcase/fuzz" />
-    <add_project name="platform/test/vts-testcase/hal" />
-    <add_project name="platform/test/vts-testcase/hal-trace" />
-    <add_project name="platform/test/vts-testcase/kernel" />
-    <add_project name="platform/test/vts-testcase/nbu" />
-    <add_project name="platform/test/vts-testcase/performance" />
-    <add_project name="platform/test/vts-testcase/security" />
-    <add_project name="platform/test/vts-testcase/vndk" />
-    <!-- bionic/libc:libc_ndk depends on libscudo -->
-    <add_project name="platform/external/scudo" />
-    <!-- external/crosvm/gpu_display uses module type wayland_protocol_codegen -->
-    <add_project name="platform/external/wayland-protocols" />
+    <!-- ============================================================= -->
 </config>
 
diff --git a/split/manifest_diff.py b/split/manifest_diff.py
new file mode 100644
index 0000000..3ba99c0
--- /dev/null
+++ b/split/manifest_diff.py
@@ -0,0 +1,275 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Compares two repo manifest xml files.
+
+Checks to see if the manifests contain the same projects, and if those
+projects contain the same attributes, linkfile elements and copyfile elements.
+"""
+
+import argparse
+import sys
+import textwrap
+from typing import Set
+import xml.etree.ElementTree as ET
+import dataclasses
+from treble.split import xml_diff
+
+Element = ET.Element
+Change = xml_diff.Change
+ChangeMap = xml_diff.ChangeMap
+
+_SINGLE_NODE_ELEMENTS = ('default', 'manifest-server', 'repo-hooks', 'include')
+_INDENT = (' ' * 2)
+
+
+@dataclasses.dataclass
+class ProjectChanges:
+  """A collection of changes between project elements.
+
+  Attributes:
+    attributes: A ChangeMap of attributes changes. Keyed by attribute name.
+    linkfiles: A ChangeMap of linkfile elements changes. Keyed by dest.
+    copyfiles: A ChangeMap of copyfile elements changes. Keyed by dest.
+  """
+  attributes: ChangeMap = dataclasses.field(default_factory=ChangeMap)
+  linkfiles: ChangeMap = dataclasses.field(default_factory=ChangeMap)
+  copyfiles: ChangeMap = dataclasses.field(default_factory=ChangeMap)
+
+  def __bool__(self):
+    return bool(self.attributes) or bool(self.linkfiles) or bool(self.copyfiles)
+
+  def __repr__(self):
+    if not self:
+      return 'No changes'
+
+    ret_str = ''
+
+    if self.attributes:
+      ret_str += 'Attributes:\n'
+      ret_str += textwrap.indent(str(self.attributes), _INDENT)
+    if self.linkfiles:
+      ret_str += 'Link Files:\n'
+      ret_str += textwrap.indent(str(self.linkfiles), _INDENT)
+    if self.copyfiles:
+      ret_str += 'Copy Files:\n'
+      ret_str += textwrap.indent(str(self.copyfiles), _INDENT)
+
+    return ret_str
+
+
+@dataclasses.dataclass
+class ManifestChanges:
+  """A collection of changes between manifests.
+
+  Attributes:
+    projects: A ChangeMap of changes to project elements. Keyed by project path.
+    remotes: A ChangeMap of changes to remote elements. Keyed by remote name.
+    other: A ChangeMap of changes to other elements. Keyed by element tag.
+  """
+  projects: ChangeMap = dataclasses.field(default_factory=ChangeMap)
+  remotes: ChangeMap = dataclasses.field(default_factory=ChangeMap)
+  other: ChangeMap = dataclasses.field(default_factory=ChangeMap)
+
+  def has_changes(self):
+    return self.projects or self.remotes or self.other
+
+  def __repr__(self):
+    ret_str = 'Project Changes:\n'
+    ret_str += (textwrap.indent(str(self.projects) + '\n', _INDENT)
+                if self.projects else _INDENT + 'No changes found.\n\n')
+    ret_str += 'Remote Changes:\n'
+    ret_str += (textwrap.indent(str(self.remotes) + '\n', _INDENT)
+                if self.remotes else _INDENT + 'No changes found.\n\n')
+    ret_str += 'Other Changes:\n'
+    ret_str += (textwrap.indent(str(self.other) + '\n', _INDENT)
+                if self.other else _INDENT + 'No changes found.\n\n')
+
+    return ret_str
+
+
+def subelement_file_changes(tag: str, p1: Element, p2: Element) -> ChangeMap:
+  """Get the changes copyfile or linkfile elements between two project elements.
+
+  Arguments:
+    tag: The tag of the element.
+    p1: the xml element for the base project.
+    p2: the xml element for the new project.
+
+  Returns:
+    A ChangeMap of copyfile or linkfile changes. Keyed by dest attribute.
+  """
+  return xml_diff.compare_subelements(
+      tag=tag,
+      p1=p1,
+      p2=p2,
+      ignored_attrs=set(),
+      key_fn=lambda x: x.get('dest'),
+      diff_fn=xml_diff.attribute_changes)
+
+
+def project_changes(p1: Element, p2: Element,
+                    ignored_attrs: Set[str]) -> ProjectChanges:
+  """Get the changes between two project elements.
+
+  Arguments:
+    p1: the xml element for the base project.
+    p2: the xml element for the new project.
+    ignored_attrs: a set of attribute names to ignore changes.
+
+  Returns:
+    A ProjectChanges object of the changes.
+  """
+  return ProjectChanges(
+      attributes=xml_diff.attribute_changes(p1, p2, ignored_attrs),
+      linkfiles=subelement_file_changes('linkfile', p1, p2),
+      copyfiles=subelement_file_changes('copyfile', p1, p2))
+
+
+def compare_single_node_elements(manifest_e1: Element, manifest_e2: Element,
+                                 ignored_attrs: Set[str]) -> ChangeMap:
+  """Get the changes between single element nodes such as <defaults> in a manifest.
+
+  Arguments:
+    manifest_e1: the xml element for the base manifest.
+    manifest_e2: the xml element for the new manifest.
+    ignored_attrs: a set of attribute names to ignore changes.
+
+  Returns:
+    A ChangeMap of changes. Keyed by the element's tag name.
+  """
+  changes = ChangeMap()
+  for tag in _SINGLE_NODE_ELEMENTS:
+    e1 = manifest_e1.find(tag)
+    e2 = manifest_e2.find(tag)
+    if e1 is None and e2 is None:
+      continue
+    elif e1 is None:
+      changes.added[tag] = xml_diff.element_string(e2)
+    elif e2 is None:
+      changes.removed[tag] = xml_diff.element_string(e1)
+    else:
+      attr_changes = xml_diff.attribute_changes(e1, e2, ignored_attrs)
+      if attr_changes:
+        changes.modified[tag] = attr_changes
+  return changes
+
+
+def compare_remote_elements(manifest_e1: Element, manifest_e2: Element,
+                            ignored_attrs: Set[str]) -> ChangeMap:
+  """Get the changes to remote elements between two manifests.
+
+  Arguments:
+    manifest_e1: the xml element for the base manifest.
+    manifest_e2: the xml element for the new manifest.
+    ignored_attrs: a set of attribute names to ignore changes.
+
+  Returns:
+    A ChangeMap of changes to remote elements. Keyed by name attribute.
+  """
+  return xml_diff.compare_subelements(
+      tag='remote',
+      p1=manifest_e1,
+      p2=manifest_e2,
+      ignored_attrs=ignored_attrs,
+      key_fn=lambda x: x.get('name'),
+      diff_fn=xml_diff.attribute_changes)
+
+
+def compare_project_elements(manifest_e1, manifest_e2,
+                             ignored_attrs: Set[str]) -> ChangeMap:
+  """Get the changes to project elements between two manifests.
+
+  Arguments:
+    manifest_e1: the xml element for the base manifest.
+    manifest_e2: the xml element for the new manifest.
+    ignored_attrs: a set of attribute names to ignore changes.
+
+  Returns:
+    A ChangeMap of changes to project elements. Keyed by path/name attribute.
+  """
+  # Ignore path attribute since it's already keyed on that value and avoid false
+  # detection when path == name on one element and path == None on the other.
+  project_ignored_attrs = ignored_attrs | set(['path'])
+  return xml_diff.compare_subelements(
+      tag='project',
+      p1=manifest_e1,
+      p2=manifest_e2,
+      ignored_attrs=project_ignored_attrs,
+      key_fn=lambda x: x.get('path', x.get('name')),
+      diff_fn=project_changes)
+
+
+def compare_manifest_elements(manifest_e1, manifest_e2,
+                              ignored_attrs: Set[str]) -> ManifestChanges:
+  """Get the changes between two manifests xml elements.
+
+  Arguments:
+    manifest_e1: the xml element for the base manifest.
+    manifest_e2: the xml element for the new manifest.
+    ignored_attrs: a set of attribute names to ignore changes.
+
+  Returns:
+    A ManifestChanges.
+  """
+  return ManifestChanges(
+      projects=compare_project_elements(manifest_e1, manifest_e2,
+                                        ignored_attrs),
+      remotes=compare_remote_elements(manifest_e1, manifest_e2, ignored_attrs),
+      other=compare_single_node_elements(manifest_e1, manifest_e2,
+                                         ignored_attrs))
+
+
+def compare_manifest_files(manifest_a: str, manifest_b: str,
+                           ignored_attrs: Set[str]) -> ManifestChanges:
+  """Get the changes between two manifests files.
+
+  Arguments:
+    manifest_a: Path to the base manifest xml file.
+    manifest_b: Path to the manifest xml file to compare against.
+    ignored_attrs: a set of attribute names to ignore changes.
+
+  Returns:
+    A ManifestChanges.
+  """
+  e1 = ET.parse(manifest_a).getroot()
+  e2 = ET.parse(manifest_b).getroot()
+  return compare_manifest_elements(
+      manifest_e1=e1, manifest_e2=e2, ignored_attrs=ignored_attrs)
+
+
+def main():
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+  parser.add_argument(
+      '--ignored_attributes',
+      type=str,
+      help='A comma separated list of attributes to ignore when comparing ' +
+      'project elements.')
+  parser.add_argument('manifest_a', help='Path to the base manifest xml file.')
+  parser.add_argument(
+      'manifest_b', help='Path to the manifest xml file to compare against.')
+  args = parser.parse_args()
+
+  ignored_attributes = set(
+      args.ignored_attributes.split(',')) if args.ignored_attributes else set()
+  changes = compare_manifest_files(args.manifest_a, args.manifest_b,
+                                   ignored_attributes)
+
+  print(changes)
+  if changes:
+    sys.exit(1)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/split/manifest_diff_test.py b/split/manifest_diff_test.py
new file mode 100644
index 0000000..ca1e742
--- /dev/null
+++ b/split/manifest_diff_test.py
@@ -0,0 +1,168 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test manifest diff."""
+import unittest
+import xml.etree.ElementTree as ET
+
+import manifest_diff
+
+
+class ManifestDiffTest(unittest.TestCase):
+
+  def _assertEqualCanonical(self, change1, change2):
+    def _canonicalize(change):
+      return {
+          identifier : ' '.join(sorted(value.split(' ')))
+          for identifier, value in change.items()
+      }
+    return self.assertEqual(_canonicalize(change1), _canonicalize(change2))
+
+  def test_project_changes(self):
+    p1 = ET.fromstring("""<project attr1="hello">
+      <linkfile src="newfile2" dest="notneeded" />
+      <linkfile src="oldfile1" dest="dest1" />
+      <copyfile src="oldfile2" dest="dest2" />
+    </project>
+    """)
+    p2 = ET.fromstring("""<project>
+      <linkfile src="newfile1" dest="dest1" />
+      <copyfile src="newfile2" dest="dest2" />
+      <copyfile src="somefile" dest="addedfile" />
+    </project>
+    """)
+    changes = manifest_diff.project_changes(p1, p2, set())
+    self.assertEqual(changes.linkfiles.added, {})
+    self._assertEqualCanonical(
+        changes.linkfiles.removed,
+        {'notneeded': '<linkfile src="newfile2" dest="notneeded" />'})
+    self.assertEqual(
+        changes.linkfiles.modified, {
+            'dest1':
+                manifest_diff.ChangeMap(modified={
+                    'src': manifest_diff.Change('oldfile1', 'newfile1')
+                })
+        })
+    self._assertEqualCanonical(
+        changes.copyfiles.added,
+        {'addedfile': '<copyfile src="somefile" dest="addedfile" />'})
+    self.assertEqual(changes.copyfiles.removed, {})
+    self.assertEqual(
+        changes.copyfiles.modified, {
+            'dest2':
+                manifest_diff.ChangeMap(modified={
+                    'src': manifest_diff.Change('oldfile2', 'newfile2')
+                })
+        })
+    self.assertEqual(
+        changes.attributes,
+        manifest_diff.ChangeMap(
+            added={}, removed={'attr1': 'hello'}, modified={}))
+
+  def test_project_changes_same(self):
+    p1 = ET.fromstring("""<project attr1="hello">
+      <linkfile src="newfile2" dest="notneeded" />
+      <linkfile src="oldfile1" dest="dest1" />
+      <copyfile src="oldfile2" dest="dest2" />
+    </project>
+    """)
+    changes = manifest_diff.project_changes(p1, p1, set())
+    self.assertFalse(changes)
+
+  def test_compare_single_node_elements(self):
+    m1 = ET.fromstring("""<manifest>
+      <default revision='dev' remote='aosp' />
+      <repo-hooks />
+    </manifest>
+    """)
+    m2 = ET.fromstring("""<manifest>
+      <default revision='release' />
+    </manifest>
+    """)
+    changes = manifest_diff.compare_single_node_elements(m1, m2, set())
+    self.assertEqual(changes.added, {})
+    self._assertEqualCanonical(changes.removed, {'repo-hooks': '<repo-hooks />'})
+    self.assertEqual(
+        changes.modified, {
+            'default':
+                manifest_diff.ChangeMap(
+                    added={},
+                    removed={'remote': 'aosp'},
+                    modified={
+                        'revision': manifest_diff.Change('dev', 'release')
+                    })
+        })
+
+  def test_compare_remote_elements(self):
+    m1 = ET.fromstring("""<manifest>
+      <remote revision="dev" name="aosp" fetch="https://aosp-source.com" />
+      <remote name="android" fetch="https://android-source.com" attr="test" />
+      <repo-hooks />
+    </manifest>
+    """)
+    m2 = ET.fromstring("""<manifest>
+      <remote revision="dev" name="android" fetch="https://android-source.com"
+        attr="test2"/>
+    </manifest>
+    """)
+    changes = manifest_diff.compare_remote_elements(m1, m2, set())
+    self.assertEqual(changes.added, {})
+    self._assertEqualCanonical(
+        changes.removed, {
+            'aosp':
+                '<remote revision="dev" name="aosp" fetch="https://aosp-source.com" />'
+        })
+    self.assertEqual(
+        changes.modified, {
+            'android':
+                manifest_diff.ChangeMap(
+                    added={'revision': 'dev'},
+                    removed={},
+                    modified={'attr': manifest_diff.Change('test', 'test2')})
+        })
+
+  def test_compare_project_elements(self):
+    m1 = ET.fromstring("""<manifest>
+      <project name="platform/project1" path="system/project1" />
+      <project name="platform/project2" path="system/project2" />
+      <project name="platform/project3" path="system/project3" />
+    </manifest>""")
+    m2 = ET.fromstring("""<manifest>
+      <project name="platform/project1" path="system/project1" />
+      <project name="system/project2" />
+      <project name="platform/project4" path="system/project4" />
+    </manifest>""")
+    changes = manifest_diff.compare_project_elements(m1, m2, set())
+    self._assertEqualCanonical(
+        changes.added, {
+            'system/project4':
+                '<project name="platform/project4" path="system/project4" />'
+        })
+    self._assertEqualCanonical(
+        changes.removed, {
+            'system/project3':
+                '<project name="platform/project3" path="system/project3" />'
+        })
+    self.assertEqual(
+        changes.modified, {
+            'system/project2':
+                manifest_diff.ProjectChanges(
+                    attributes=manifest_diff.ChangeMap(
+                        added={},
+                        removed={},
+                        modified={
+                            'name':
+                                manifest_diff.Change('platform/project2',
+                                                     'system/project2')
+                        }))
+        })
diff --git a/split/manifest_split.py b/split/manifest_split.py
index 89f8dc5..d5f9b95 100644
--- a/split/manifest_split.py
+++ b/split/manifest_split.py
@@ -32,6 +32,12 @@
             <add_project name="vendor/my/needed/project" />
             <remove_project name="vendor/my/unused/project" />
           </config>
+  --ignore-default-config
+      If provided, don't include default_config.xml.
+  --installed-prebuilt
+      Specify the directory containing an installed prebuilt Android.bp file.
+      Supply this option zero or more times, once for each installed prebuilt
+      directory.
   --repo-list <path>
       Optional path to the output of the 'repo list' command. Used if the
       output of 'repo list' needs pre-processing before being used by
@@ -44,15 +50,20 @@
   --module-info <path>
       Optional path to the module-info.json file found in an out dir.
       If not provided, the default file is used based on the lunch environment.
+  --skip-module-info
+      If provided, skip parsing module-info.json for direct and adjacent
+      dependencies. Overrides --module-info option.
   --kati-stamp <path>
       Optional path to the .kati_stamp file found in an out dir.
       If not provided, the default file is used based on the lunch environment.
+  --skip-kati
+      If provided, skip Kati makefiles projects. Overrides --kati-stamp option.
   --overlay <path>
       Optional path(s) to treat as overlays when parsing the kati stamp file
       and scanning for makefiles. See the tools/treble/build/sandbox directory
       for more info about overlays. This flag can be passed more than once.
-  --debug
-      Print debug messages.
+  --debug-file <path>
+      If provided, debug info will be written to a JSON file at this path.
   -h  (--help)
       Display this usage message and exit.
 """
@@ -60,15 +71,20 @@
 from __future__ import print_function
 
 import getopt
-import hashlib
 import json
 import logging
 import os
-import pkg_resources
+import pkgutil
+import re
 import subprocess
 import sys
+import tempfile
+from typing import Dict, List, Pattern, Set, Tuple
 import xml.etree.ElementTree as ET
 
+import dataclasses
+
+
 logging.basicConfig(
     stream=sys.stdout,
     level=logging.INFO,
@@ -78,62 +94,132 @@
 
 # Projects determined to be needed despite the dependency not being visible
 # to ninja.
-DEFAULT_CONFIG_PATH = pkg_resources.resource_filename(__name__,
-                                                      "default_config.xml")
+DEFAULT_CONFIG_XML = "default_config.xml"
+
+# Pattern that matches a java dependency.
+_JAVA_LIB_PATTERN = re.compile(
+    # pylint: disable=line-too-long
+    '^out/target/common/obj/JAVA_LIBRARIES/(.+)_intermediates/classes-header.jar$'
+)
 
 
-def read_config(config_file):
-  """Reads a config XML file to find extra projects to add or remove.
+@dataclasses.dataclass
+class PathMappingConfig:
+  pattern: Pattern[str]
+  sub: str
 
-  Args:
-    config_file: The filename of the config XML.
 
-  Returns:
-    A tuple of (set of remove_projects, set of add_projects) from the config.
+@dataclasses.dataclass
+class ManifestSplitConfig:
+  """Holds the configuration for the split manifest tool.
+
+  Attributes:
+    remove_projects: A Dict of project name to the config file that specified
+      this project, for projects that should be removed from the resulting
+      manifest.
+    add_projects: A Dict of project name to the config file that specified
+      this project, for projects that should be added to the resulting manifest.
+    path_mappings: A list of PathMappingConfigs to modify a path in the build
+      sandbox to the path in the manifest.
   """
-  root = ET.parse(config_file).getroot()
-  remove_projects = set(
-      [child.attrib["name"] for child in root.findall("remove_project")])
-  add_projects = set(
-      [child.attrib["name"] for child in root.findall("add_project")])
-  return remove_projects, add_projects
+  remove_projects: Dict[str, str]
+  add_projects: Dict[str, str]
+  path_mappings: List[PathMappingConfig]
+
+  @classmethod
+  def from_config_files(cls, config_files: List[str]):
+    """Reads from a list of config XML files.
+
+    Args:
+      config_files: A list of config XML filenames.
+
+    Returns:
+      A ManifestSplitConfig from the files.
+    """
+    remove_projects: Dict[str, str] = {}
+    add_projects: Dict[str, str] = {}
+    path_mappings = []
+    for config_file in config_files:
+      root = ET.parse(config_file).getroot()
+
+      remove_projects.update({
+          c.attrib["name"]: config_file for c in root.findall("remove_project")
+      })
+
+      add_projects.update(
+          {c.attrib["name"]: config_file for c in root.findall("add_project")})
+
+      path_mappings.extend([
+          PathMappingConfig(
+              re.compile(child.attrib["pattern"]), child.attrib["sub"])
+          for child in root.findall("path_mapping")
+      ])
+
+    return cls(remove_projects, add_projects, path_mappings)
 
 
-def get_repo_projects(repo_list_file):
-  """Returns a dict of { project path : project name } using 'repo list'.
+def get_repo_projects(repo_list_file, manifest, path_mappings):
+  """Returns a dict of { project path : project name } using the manifest.
+
+  Path mapping stops at the first pattern that matches.  If the mapping results
+  in an empty string, that entry is removed.
 
   Args:
-    repo_list_file: An optional filename to read instead of calling the repo
-      list command.
+    repo_list_file: An optional filename to read instead of parsing the manifest.
+    manifest: The manifest object to scan for projects.
+    path_mappings: A list of PathMappingConfigs to modify a path in the build
+      sandbox to the path in the manifest.
   """
   repo_list = []
 
   if repo_list_file:
     with open(repo_list_file) as repo_list_lines:
-      repo_list = [line.strip() for line in repo_list_lines if line.strip()]
+      repo_list = [line.strip().split(" : ") for line in repo_list_lines if line.strip()]
   else:
-    repo_list = subprocess.check_output([
-        "repo",
-        "list",
-    ]).decode().strip("\n").split("\n")
-  return dict([entry.split(" : ") for entry in repo_list])
+    root = manifest.getroot()
+    repo_list = [(p.get("path", p.get("name")), p.get("name")) for p in root.findall("project")]
+
+  repo_dict = {}
+  for entry in repo_list:
+    path, project = entry
+    for mapping in path_mappings:
+      if mapping.pattern.fullmatch(path):
+        path = mapping.pattern.sub(mapping.sub, path)
+        break
+    # If the resulting path mapping is empty, then don't add entry
+    if path:
+      repo_dict[path] = project
+  return repo_dict
 
 
-def get_module_info(module_info_file, repo_projects):
-  """Returns a dict of { project name : set of modules } in each project.
+class ModuleInfo:
+  """Contains various mappings to/from module/project"""
 
-  Args:
-    module_info_file: The path to a module-info.json file from a build.
-    repo_projects: The output of the get_repo_projects function.
+  def __init__(self, module_info_file, repo_projects):
+    """Initialize a module info instance.
 
-  Raises:
-    ValueError: A module from module-info.json belongs to a path not
-      known by the repo projects output.
-  """
-  project_modules = {}
+    Builds various maps related to platform build system modules and how they
+    relate to each other and projects.
 
-  with open(module_info_file) as module_info_file:
-    module_info = json.load(module_info_file)
+    Args:
+      module_info_file: The path to a module-info.json file from a build.
+      repo_projects: The output of the get_repo_projects function.
+
+    Raises:
+      ValueError: A module from module-info.json belongs to a path not
+        known by the repo projects output.
+    """
+    # Maps a project to the set of modules it contains.
+    self.project_modules = {}
+    # Maps a module to the project that contains it.
+    self.module_project = {}
+    # Maps a module to its class.
+    self.module_class = {}
+    # Maps a module to modules it depends on.
+    self.module_deps = {}
+
+    with open(module_info_file) as module_info_file:
+      module_info = json.load(module_info_file)
 
     def module_has_valid_path(module):
       return ("path" in module_info[module] and module_info[module]["path"] and
@@ -153,8 +239,25 @@
       if not project_path:
         raise ValueError("Unknown module path for module %s: %s" %
                          (module, module_info[module]))
-      project_modules.setdefault(repo_projects[project_path], set()).add(module)
-    return project_modules
+      repo_project = repo_projects[project_path]
+      self.project_modules.setdefault(repo_project, set()).add(module)
+      self.module_project[module] = repo_project
+
+    def dep_from_raw_dep(raw_dep):
+      match = re.search(_JAVA_LIB_PATTERN, raw_dep)
+      return match.group(1) if match else raw_dep
+
+    def deps_from_raw_deps(raw_deps):
+      return [dep_from_raw_dep(raw_dep) for raw_dep in raw_deps]
+
+    self.module_class = {
+        module: module_info[module]["class"][0]
+        for module in module_info
+    }
+    self.module_deps = {
+        module: deps_from_raw_deps(module_info[module]["dependencies"])
+        for module in module_info
+    }
 
 
 def get_ninja_inputs(ninja_binary, ninja_build_file, modules):
@@ -165,18 +268,35 @@
   Args:
     ninja_binary: The path to a ninja binary.
     ninja_build_file: The path to a .ninja file from a build.
-    modules: The set of modules to scan for inputs.
+    modules: The list of modules to scan for inputs.
   """
-  inputs = set(
-      subprocess.check_output([
-          ninja_binary,
-          "-f",
-          ninja_build_file,
-          "-t",
-          "inputs",
-          "-d",
-      ] + list(modules)).decode().strip("\n").split("\n"))
-  return {path.strip() for path in inputs}
+  inputs = set()
+  NINJA_SHARD_LIMIT = 20000
+  for i in range(0, len(modules), NINJA_SHARD_LIMIT):
+    modules_shard = modules[i:i + NINJA_SHARD_LIMIT]
+    inputs = inputs.union(set(
+        subprocess.check_output([
+            ninja_binary,
+            "-f",
+            ninja_build_file,
+            "-t",
+            "inputs",
+            "-d",
+        ] + list(modules_shard)).decode().strip("\n").split("\n")))
+
+  def input_allowed(path):
+    path = path.strip()
+    if path.endswith("TEST_MAPPING") and "test_mapping" not in modules:
+      # Exclude projects that are only needed for TEST_MAPPING files, unless the
+      # user is asking to build 'test_mapping'.
+      return False
+    if path.endswith("MODULE_LICENSE_GPL"):
+      # Exclude projects that are included only due to having a
+      # MODULE_LICENSE_GPL file, if no other inputs from that project are used.
+      return False
+    return path
+
+  return {path.strip() for path in inputs if input_allowed(path)}
 
 
 def get_kati_makefiles(kati_stamp_file, overlays):
@@ -278,21 +398,22 @@
 
 
 def get_input_projects(repo_projects, inputs):
-  """Returns the set of project names that contain the given input paths.
+  """Returns the collection of project names that contain the given input paths.
 
   Args:
     repo_projects: The output of the get_repo_projects function.
     inputs: The paths of input files used in the build, as given by the ninja
       inputs tool.
   """
-  input_project_paths = [
-      scan_repo_projects(repo_projects, input_path)
-      for input_path in inputs
-      if (not input_path.startswith("out/") and not input_path.startswith("/"))
-  ]
+  input_project_paths = {}
+  for input_path in inputs:
+    if not input_path.startswith("out/") and not input_path.startswith("/"):
+      input_project_paths.setdefault(
+          scan_repo_projects(repo_projects, input_path), []).append(input_path)
+
   return {
-      repo_projects[project_path]
-      for project_path in input_project_paths
+      repo_projects[project_path]: inputs
+      for project_path, inputs in input_project_paths.items()
       if project_path is not None
   }
 
@@ -317,28 +438,21 @@
   return manifest
 
 
-def create_manifest_sha1_element(manifest, name):
-  """Creates and returns an ElementTree 'hash' Element using a sha1 hash.
-
-  Args:
-    manifest: The manifest ElementTree to hash.
-    name: The name string to give this element.
-
-  Returns:
-    The ElementTree 'hash' Element.
-  """
-  sha1_element = ET.Element("hash")
-  sha1_element.set("type", "sha1")
-  sha1_element.set("name", name)
-  sha1_element.set("value",
-                   hashlib.sha1(ET.tostring(manifest.getroot())).hexdigest())
-  return sha1_element
+@dataclasses.dataclass
+class DebugInfo:
+  """Simple class to store structured debug info for a project."""
+  direct_input: bool = False
+  adjacent_input: bool = False
+  deps_input: bool = False
+  kati_makefiles: List[str] = dataclasses.field(default_factory=list)
+  manual_add_config: str = ""
+  manual_remove_config: str = ""
 
 
 def create_split_manifest(targets, manifest_file, split_manifest_file,
                           config_files, repo_list_file, ninja_build_file,
                           ninja_binary, module_info_file, kati_stamp_file,
-                          overlays):
+                          overlays, installed_prebuilts, debug_file):
   """Creates and writes a split manifest by inspecting build inputs.
 
   Args:
@@ -356,123 +470,177 @@
     kati_stamp_file: The path to a .kati_stamp file from a build.
     overlays: A list of paths to treat as overlays when parsing the kati stamp
       file.
+    installed_prebuilts: A list of paths for which to create "fake" repo
+      entries. These entries allow the tool to recognize modules that installed
+      rather than being sync'd via a manifest.
+    debug_file: If not None, the path to write JSON debug info.
   """
-  remove_projects = set()
-  add_projects = set()
-  for config_file in config_files:
-    config_remove_projects, config_add_projects = read_config(config_file)
-    remove_projects = remove_projects.union(config_remove_projects)
-    add_projects = add_projects.union(config_add_projects)
+  debug_info = {}
 
-  repo_projects = get_repo_projects(repo_list_file)
-  module_info = get_module_info(module_info_file, repo_projects)
+  config = ManifestSplitConfig.from_config_files(config_files)
+  original_manifest = ET.parse(manifest_file)
+
+
+  repo_projects = get_repo_projects(repo_list_file, original_manifest,
+                                    config.path_mappings)
+  repo_projects.update({ip: ip for ip in installed_prebuilts})
 
   inputs = get_ninja_inputs(ninja_binary, ninja_build_file, targets)
-  input_projects = get_input_projects(repo_projects, inputs)
-  if logger.isEnabledFor(logging.DEBUG):
-    for project in sorted(input_projects):
-      logger.debug("Direct dependency: %s", project)
-  logger.info("%s projects needed for targets \"%s\"", len(input_projects),
-              " ".join(targets))
+  input_projects = set(get_input_projects(repo_projects, inputs).keys())
+  for project in input_projects:
+    debug_info.setdefault(project, DebugInfo()).direct_input = True
+  logger.info(
+      "%s projects needed for Ninja-graph direct dependencies of targets \"%s\"",
+      len(input_projects), " ".join(targets))
 
-  kati_makefiles = get_kati_makefiles(kati_stamp_file, overlays)
-  kati_makefiles_projects = get_input_projects(repo_projects, kati_makefiles)
-  if logger.isEnabledFor(logging.DEBUG):
-    for project in sorted(kati_makefiles_projects.difference(input_projects)):
-      logger.debug("Kati makefile dependency: %s", project)
-  input_projects = input_projects.union(kati_makefiles_projects)
-  logger.info("%s projects after including Kati makefiles projects.",
-              len(input_projects))
+  if kati_stamp_file:
+    kati_makefiles = get_kati_makefiles(kati_stamp_file, overlays)
+    kati_makefiles_projects = get_input_projects(repo_projects, kati_makefiles)
+    for project, makefiles in kati_makefiles_projects.items():
+      debug_info.setdefault(project, DebugInfo()).kati_makefiles = makefiles
+    input_projects = input_projects.union(kati_makefiles_projects.keys())
+    logger.info("%s projects after including Kati makefiles projects.",
+                len(input_projects))
+  else:
+    logger.info("Kati makefiles projects skipped.")
 
-  if logger.isEnabledFor(logging.DEBUG):
-    manual_projects = add_projects.difference(input_projects)
-    for project in sorted(manual_projects):
-      logger.debug("Manual inclusion: %s", project)
-  input_projects = input_projects.union(add_projects)
+  for project, cfile in config.add_projects.items():
+    debug_info.setdefault(project, DebugInfo()).manual_add_config = cfile
+  for project, cfile in config.remove_projects.items():
+    debug_info.setdefault(project, DebugInfo()).manual_remove_config = cfile
+  input_projects = input_projects.union(config.add_projects.keys())
   logger.info("%s projects after including manual additions.",
               len(input_projects))
 
   # Remove projects from our set of input projects before adding adjacent
   # modules, so that no project is added only because of an adjacent
   # dependency in a to-be-removed project.
-  input_projects = input_projects.difference(remove_projects)
+  input_projects = input_projects.difference(config.remove_projects.keys())
 
   # While we still have projects whose modules we haven't checked yet,
-  checked_projects = set()
-  projects_to_check = input_projects.difference(checked_projects)
+  if module_info_file:
+    module_info = ModuleInfo(module_info_file, repo_projects)
+    checked_projects = set()
+    projects_to_check = input_projects.difference(checked_projects)
+    logger.info("Checking module-info dependencies for direct and adjacent modules...")
+  else:
+    logging.info("Direct and adjacent modules skipped.")
+    projects_to_check = None
+
+  iteration = 0
+
   while projects_to_check:
+    iteration += 1
     # check all modules in each project,
     modules = []
+    deps_additions = set()
+
+    def process_deps(module):
+      for d in module_info.module_deps[module]:
+        if d in module_info.module_class:
+          if module_info.module_class[d] == "HEADER_LIBRARIES":
+            hla = module_info.module_project[d]
+            if hla not in input_projects:
+              deps_additions.add(hla)
+
     for project in projects_to_check:
       checked_projects.add(project)
-      if project not in module_info:
+      if project not in module_info.project_modules:
         continue
-      modules += module_info[project]
+      for module in module_info.project_modules[project]:
+        modules.append(module)
+        process_deps(module)
+
+    for project in deps_additions:
+      debug_info.setdefault(project, DebugInfo()).deps_input = True
+    input_projects = input_projects.union(deps_additions)
+    logger.info(
+        "pass %d - %d projects after including HEADER_LIBRARIES dependencies",
+        iteration, len(input_projects))
 
     # adding those modules' input projects to our list of projects.
     inputs = get_ninja_inputs(ninja_binary, ninja_build_file, modules)
-    adjacent_module_additions = get_input_projects(repo_projects, inputs)
-    if logger.isEnabledFor(logging.DEBUG):
-      for project in sorted(
-          adjacent_module_additions.difference(input_projects)):
-        logger.debug("Adjacent module dependency: %s", project)
+    adjacent_module_additions = set(
+        get_input_projects(repo_projects, inputs).keys())
+    for project in adjacent_module_additions:
+      debug_info.setdefault(project, DebugInfo()).adjacent_input = True
     input_projects = input_projects.union(adjacent_module_additions)
-    logger.info("%s total projects so far.", len(input_projects))
+    logger.info(
+        "pass %d - %d projects after including adjacent-module Ninja-graph dependencies",
+        iteration, len(input_projects))
 
     projects_to_check = input_projects.difference(checked_projects)
 
-  original_manifest = ET.parse(manifest_file)
-  original_sha1 = create_manifest_sha1_element(original_manifest, "original")
+  logger.info("%s projects - complete", len(input_projects))
+
   split_manifest = update_manifest(original_manifest, input_projects,
-                                   remove_projects)
-  split_manifest.getroot().append(original_sha1)
-  split_manifest.getroot().append(
-      create_manifest_sha1_element(split_manifest, "self"))
+                                   config.remove_projects.keys())
   split_manifest.write(split_manifest_file)
 
+  if debug_file:
+    with open(debug_file, "w") as debug_fp:
+      logger.info("Writing debug info to %s", debug_file)
+      json.dump(
+          debug_info,
+          fp=debug_fp,
+          sort_keys=True,
+          indent=2,
+          default=lambda info: info.__dict__)
+
 
 def main(argv):
   try:
     opts, args = getopt.getopt(argv, "h", [
         "help",
-        "debug",
+        "debug-file=",
         "manifest=",
         "split-manifest=",
         "config=",
+        "ignore-default-config",
         "repo-list=",
         "ninja-build=",
         "ninja-binary=",
         "module-info=",
+        "skip-module-info",
         "kati-stamp=",
+        "skip-kati",
         "overlay=",
+        "installed-prebuilt=",
     ])
   except getopt.GetoptError as err:
     print(__doc__, file=sys.stderr)
     print("**%s**" % str(err), file=sys.stderr)
     sys.exit(2)
 
+  debug_file = None
   manifest_file = None
   split_manifest_file = None
-  config_files = [DEFAULT_CONFIG_PATH]
+  config_files = []
   repo_list_file = None
   ninja_build_file = None
   module_info_file = None
-  ninja_binary = "ninja"
+  ninja_binary = "prebuilts/build-tools/linux-x86/bin/ninja"
   kati_stamp_file = None
   overlays = []
+  installed_prebuilts = []
+  ignore_default_config = False
+  skip_kati = False
+  skip_module_info = False
 
   for o, a in opts:
     if o in ("-h", "--help"):
       print(__doc__, file=sys.stderr)
       sys.exit()
-    elif o in ("--debug"):
-      logger.setLevel(logging.DEBUG)
+    elif o in ("--debug-file"):
+      debug_file = a
     elif o in ("--manifest"):
       manifest_file = a
     elif o in ("--split-manifest"):
       split_manifest_file = a
     elif o in ("--config"):
       config_files.append(a)
+    elif o == "--ignore-default-config":
+      ignore_default_config = True
     elif o in ("--repo-list"):
       repo_list_file = a
     elif o in ("--ninja-build"):
@@ -481,10 +649,16 @@
       ninja_binary = a
     elif o in ("--module-info"):
       module_info_file = a
+    elif o == "--skip-module-info":
+      skip_module_info = True
     elif o in ("--kati-stamp"):
       kati_stamp_file = a
+    elif o == "--skip-kati":
+      skip_kati = True
     elif o in ("--overlay"):
       overlays.append(a)
+    elif o in ("--installed-prebuilt"):
+      installed_prebuilts.append(a)
     else:
       assert False, "unknown option \"%s\"" % o
 
@@ -500,29 +674,47 @@
     print(__doc__, file=sys.stderr)
     print("**Missing required flag --split-manifest**", file=sys.stderr)
     sys.exit(2)
-  if not module_info_file:
+
+  if skip_module_info:
+    if module_info_file:
+      logging.warning("User provided both --skip-module-info and --module-info args.  Arg --module-info ignored.")
+    module_info_file = None
+  elif not module_info_file:
     module_info_file = os.path.join(os.environ["ANDROID_PRODUCT_OUT"],
                                     "module-info.json")
-  if not kati_stamp_file:
+  if skip_kati:
+    if kati_stamp_file:
+      logging.warning("User provided both --skip-kati and --kati-stamp args.  Arg --kati-stamp ignored.")
+    kati_stamp_file = None
+  elif not kati_stamp_file:
     kati_stamp_file = os.path.join(
         os.environ["ANDROID_BUILD_TOP"], "out",
         ".kati_stamp-%s" % os.environ["TARGET_PRODUCT"])
+
   if not ninja_build_file:
     ninja_build_file = os.path.join(
         os.environ["ANDROID_BUILD_TOP"], "out",
         "combined-%s.ninja" % os.environ["TARGET_PRODUCT"])
 
-  create_split_manifest(
-      targets=args,
-      manifest_file=manifest_file,
-      split_manifest_file=split_manifest_file,
-      config_files=config_files,
-      repo_list_file=repo_list_file,
-      ninja_build_file=ninja_build_file,
-      ninja_binary=ninja_binary,
-      module_info_file=module_info_file,
-      kati_stamp_file=kati_stamp_file,
-      overlays=overlays)
+  with tempfile.NamedTemporaryFile() as default_config_file:
+    if not ignore_default_config:
+      default_config_file.write(pkgutil.get_data(__name__, DEFAULT_CONFIG_XML))
+      default_config_file.flush()
+      config_files.insert(0, default_config_file.name)
+
+    create_split_manifest(
+        targets=args,
+        manifest_file=manifest_file,
+        split_manifest_file=split_manifest_file,
+        config_files=config_files,
+        repo_list_file=repo_list_file,
+        ninja_build_file=ninja_build_file,
+        ninja_binary=ninja_binary,
+        module_info_file=module_info_file,
+        kati_stamp_file=kati_stamp_file,
+        overlays=overlays,
+        installed_prebuilts=installed_prebuilts,
+        debug_file=debug_file)
 
 
 if __name__ == "__main__":
diff --git a/split/manifest_split_test.py b/split/manifest_split_test.py
index 2fdb9bb..546d3c1 100644
--- a/split/manifest_split_test.py
+++ b/split/manifest_split_test.py
@@ -13,9 +13,10 @@
 # limitations under the License.
 """Test manifest split."""
 
-import hashlib
+import json
 import mock
 import os
+import re
 import subprocess
 import tempfile
 import unittest
@@ -34,12 +35,40 @@
           <add_project name="add2" />
           <remove_project name="remove1" />
           <remove_project name="remove2" />
+          <path_mapping pattern="p1.*" sub="$0" />
         </config>""")
       test_config.flush()
-      remove_projects, add_projects = manifest_split.read_config(
-          test_config.name)
-      self.assertEqual(remove_projects, set(['remove1', 'remove2']))
-      self.assertEqual(add_projects, set(['add1', 'add2']))
+      config = manifest_split.ManifestSplitConfig.from_config_files(
+          [test_config.name])
+      self.assertEqual(config.remove_projects, {
+          'remove1': test_config.name,
+          'remove2': test_config.name
+      })
+      self.assertEqual(config.add_projects, {
+          'add1': test_config.name,
+          'add2': test_config.name
+      })
+      self.assertEqual(config.path_mappings, [
+          manifest_split.PathMappingConfig(re.compile('p1.*'), '$0'),
+      ])
+
+  def test_get_repo_projects_from_manifest(self):
+    manifest_contents = """
+      <manifest>
+        <project name="platform/project1" path="system/project1" />
+        <project name="platform/project2" path="system/project2" />
+        <project name="platform/project3" path="system/project3" />
+      </manifest>"""
+    manifest = ET.ElementTree(ET.fromstring(manifest_contents))
+    projects = manifest_split.get_repo_projects(
+        None, manifest, path_mappings=[])
+    self.assertDictEqual(
+        {
+            'system/project1': 'platform/project1',
+            'system/project2': 'platform/project2',
+            'system/project3': 'platform/project3',
+        }, projects)
+
 
   def test_get_repo_projects(self):
     with tempfile.NamedTemporaryFile('w+t') as repo_list_file:
@@ -47,7 +76,29 @@
         system/project1 : platform/project1
         system/project2 : platform/project2""")
       repo_list_file.flush()
-      repo_projects = manifest_split.get_repo_projects(repo_list_file.name)
+      repo_projects = manifest_split.get_repo_projects(
+          repo_list_file.name, None, path_mappings=[])
+      self.assertEqual(
+          repo_projects, {
+              'system/project1': 'platform/project1',
+              'system/project2': 'platform/project2',
+          })
+
+  def test_get_repo_projects_with_mappings(self):
+    with tempfile.NamedTemporaryFile('w+t') as repo_list_file:
+      repo_list_file.write("""
+        overlay/system/project1 : platform/project1
+        system/project2 : platform/project2
+        hide/this/one : platform/project3""")
+      repo_list_file.flush()
+      path_mappings = [
+          manifest_split.PathMappingConfig(re.compile('^overlay/(.*)'), '\\1'),
+          manifest_split.PathMappingConfig(re.compile('^hide/this/one.*'), ''),
+      ]
+
+      repo_projects = manifest_split.get_repo_projects(repo_list_file.name,
+                                                       None,
+                                                       path_mappings)
       self.assertEqual(
           repo_projects, {
               'system/project1': 'platform/project1',
@@ -57,34 +108,89 @@
   def test_get_module_info(self):
     with tempfile.NamedTemporaryFile('w+t') as module_info_file:
       module_info_file.write("""{
-        "target1a": { "path": ["system/project1"] },
-        "target1b": { "path": ["system/project1"] },
-        "target2": { "path": ["out/project2"] },
-        "target3": { "path": ["vendor/google/project3"] }
+        "target1a": { "class": ["EXECUTABLES"], "path": ["system/project1"], "dependencies": ["target2"] },
+        "target1b": { "class": ["EXECUTABLES"], "path": ["system/project1"], "dependencies": ["target3", "target42"] },
+        "target2": { "class": ["SHARED_LIBRARIES"], "path": ["out/project2"], "dependencies": [] },
+        "target3": { "class": ["SHARED_LIBRARIES"], "path": ["vendor/google/project3"], "dependencies": ["x", "y", "z"] },
+        "target4a": { "class": ["APPS"], "path": ["system/project4"], "dependencies": ["out/target/common/obj/JAVA_LIBRARIES/target4b_intermediates/classes-header.jar"] },
+        "target4b": { "class": ["JAVA_LIBRARIES"],  "path": ["system/project4"], "dependencies": [] }
       }""")
       module_info_file.flush()
       repo_projects = {
           'system/project1': 'platform/project1',
+          'system/project4': 'platform/project4',
           'vendor/google/project3': 'vendor/project3',
       }
-      module_info = manifest_split.get_module_info(module_info_file.name,
-                                                   repo_projects)
+      module_info = manifest_split.ModuleInfo(module_info_file.name,
+                                              repo_projects)
       self.assertEqual(
-          module_info, {
+          module_info.project_modules, {
               'platform/project1': set(['target1a', 'target1b']),
+              'platform/project4': set(['target4a', 'target4b']),
               'vendor/project3': set(['target3']),
           })
+      self.assertEqual(
+          module_info.module_project, {
+              'target1a': 'platform/project1',
+              'target1b': 'platform/project1',
+              'target3': 'vendor/project3',
+              'target4a': 'platform/project4',
+              'target4b': 'platform/project4',
+          })
+      self.assertEqual(
+          module_info.module_class, {
+              'target1a': 'EXECUTABLES',
+              'target1b': 'EXECUTABLES',
+              'target2': 'SHARED_LIBRARIES',
+              'target3': 'SHARED_LIBRARIES',
+              'target4a': 'APPS',
+              'target4b': 'JAVA_LIBRARIES',
+          })
+      self.assertEqual(
+          module_info.module_deps, {
+              'target1a': ['target2'],
+              'target1b': ['target3', 'target42'],
+              'target2': [],
+              'target3': ['x', 'y', 'z'],
+              'target4a': ['target4b'],
+              'target4b': [],
+          })
 
   def test_get_module_info_raises_on_unknown_module_path(self):
     with tempfile.NamedTemporaryFile('w+t') as module_info_file:
       module_info_file.write("""{
-        "target1": { "path": ["system/unknown/project1"] }
+        "target1": { "class": ["EXECUTABLES"], "path": ["system/unknown/project1"], "dependencies": [] }
       }""")
       module_info_file.flush()
       repo_projects = {}
       with self.assertRaisesRegex(ValueError,
                                   'Unknown module path for module target1'):
-        manifest_split.get_module_info(module_info_file.name, repo_projects)
+        manifest_split.ModuleInfo(module_info_file.name, repo_projects)
+
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
+  def test_get_ninja_inputs(self, mock_check_output):
+    mock_check_output.return_value = b"""
+    path/to/input1
+    path/to/input2
+    path/to/TEST_MAPPING
+    path/to/MODULE_LICENSE_GPL
+    """
+
+    inputs = manifest_split.get_ninja_inputs('unused', 'unused', ['droid'])
+    self.assertEqual(inputs, {'path/to/input1', 'path/to/input2'})
+
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
+  def test_get_ninja_inputs_includes_test_mapping(self, mock_check_output):
+    mock_check_output.return_value = b"""
+    path/to/input1
+    path/to/input2
+    path/to/TEST_MAPPING
+    """
+
+    inputs = manifest_split.get_ninja_inputs('unused', 'unused',
+                                             ['droid', 'test_mapping'])
+    self.assertEqual(
+        inputs, {'path/to/input1', 'path/to/input2', 'path/to/TEST_MAPPING'})
 
   @mock.patch.object(subprocess, 'check_output', autospec=True)
   def test_get_kati_makefiles(self, mock_check_output):
@@ -113,9 +219,7 @@
       makefiles.append(symlink_dest)
 
       # Mock the output of ckati_stamp_dump:
-      mock_check_output.side_effect = [
-          '\n'.join(makefiles).encode(),
-      ]
+      mock_check_output.return_value = '\n'.join(makefiles).encode()
 
       kati_makefiles = manifest_split.get_kati_makefiles(
           'stamp-file', ['overlays/oem_overlay/'])
@@ -163,8 +267,10 @@
         '/tmp/absolute/path/file.java',
     ]
     self.assertEqual(
-        manifest_split.get_input_projects(repo_projects, inputs),
-        set(['platform/project1', 'platform/project2']))
+        manifest_split.get_input_projects(repo_projects, inputs), {
+            'platform/project1': ['system/project1/path/to/file.h'],
+            'platform/project2': ['system/project2/path/to/another_file.cc'],
+        })
 
   def test_update_manifest(self):
     manifest_contents = """
@@ -185,22 +291,16 @@
         ET.tostring(projects[0]).strip().decode(),
         '<project name="platform/project1" path="system/project1" />')
 
-  def test_create_manifest_sha1_element(self):
-    manifest = ET.ElementTree(ET.fromstring('<manifest></manifest>'))
-    manifest_sha1 = hashlib.sha1(ET.tostring(manifest.getroot())).hexdigest()
-    self.assertEqual(
-        ET.tostring(
-            manifest_split.create_manifest_sha1_element(
-                manifest, 'test_manifest')).decode(),
-        '<hash name="test_manifest" type="sha1" value="%s" />' % manifest_sha1)
-
   @mock.patch.object(subprocess, 'check_output', autospec=True)
   def test_create_split_manifest(self, mock_check_output):
     with tempfile.NamedTemporaryFile('w+t') as repo_list_file, \
       tempfile.NamedTemporaryFile('w+t') as manifest_file, \
       tempfile.NamedTemporaryFile('w+t') as module_info_file, \
       tempfile.NamedTemporaryFile('w+t') as config_file, \
-      tempfile.NamedTemporaryFile('w+t') as split_manifest_file:
+      tempfile.NamedTemporaryFile('w+t') as split_manifest_file, \
+      tempfile.TemporaryDirectory() as temp_dir:
+
+      os.chdir(temp_dir)
 
       repo_list_file.write("""
         system/project1 : platform/project1
@@ -208,7 +308,11 @@
         system/project3 : platform/project3
         system/project4 : platform/project4
         system/project5 : platform/project5
-        system/project6 : platform/project6""")
+        system/project6 : platform/project6
+        system/project7 : platform/project7
+        system/project8 : platform/project8
+        system/project9 : platform/project9
+        vendor/project1 : vendor/project1""")
       repo_list_file.flush()
 
       manifest_file.write("""
@@ -219,16 +323,23 @@
           <project name="platform/project4" path="system/project4" />
           <project name="platform/project5" path="system/project5" />
           <project name="platform/project6" path="system/project6" />
+          <project name="platform/project7" path="system/project7" />
+          <project name="platform/project8" path="system/project8" />
+          <project name="platform/project9" path="system/project9" />
+          <project name="vendor/project1" path="vendor/project1" />
         </manifest>""")
       manifest_file.flush()
 
       module_info_file.write("""{
-        "droid": { "path": ["system/project1"] },
-        "target_a": { "path": ["out/project2"] },
-        "target_b": { "path": ["system/project3"] },
-        "target_c": { "path": ["system/project4"] },
-        "target_d": { "path": ["system/project5"] },
-        "target_e": { "path": ["system/project6"] }
+        "droid": { "class": ["EXECUTABLES"], "path": ["system/project1"], "dependencies": [] },
+        "target_a": { "class": ["EXECUTABLES"], "path": ["out/project2"], "dependencies": ["unknown_module_a"] },
+        "target_b": { "class": ["EXECUTABLES"], "path": ["system/project3"], "dependencies": ["target_f", "unknown_module_b"] },
+        "target_c": { "class": ["EXECUTABLES"], "path": ["system/project4"], "dependencies": [] },
+        "target_d": { "class": ["EXECUTABLES"], "path": ["system/project5"], "dependencies": [] },
+        "target_e": { "class": ["EXECUTABLES"], "path": ["system/project6"], "dependencies": [] },
+        "target_f": { "class": ["HEADER_LIBRARIES"], "path": ["system/project7"], "dependencies": [] },
+        "target_g": { "class": ["SHARED_LIBRARIES"], "path": ["system/project8"], "dependencies": ["target_h"] },
+        "target_h": { "class": ["HEADER_LIBRARIES"], "path": ["system/project9"], "dependencies": [] }
       }""")
       module_info_file.flush()
 
@@ -253,9 +364,14 @@
       system/project4/file3
       """
 
+      product_makefile = 'vendor/project1/product.mk'
+      os.makedirs(os.path.dirname(product_makefile))
+      os.mknod(product_makefile)
+      kati_stamp_dump = product_makefile.encode()
+
       mock_check_output.side_effect = [
           ninja_inputs_droid,
-          b'',  # Unused kati makefiles. This is tested in its own method.
+          kati_stamp_dump,
           ninja_inputs_target_b,
           ninja_inputs_target_c,
       ]
@@ -267,11 +383,13 @@
         </config>""")
       config_file.flush()
 
+      debug_file = os.path.join(temp_dir, 'debug.json')
+
       manifest_split.create_split_manifest(
           ['droid'], manifest_file.name, split_manifest_file.name,
           [config_file.name], repo_list_file.name, 'build-target.ninja',
           'ninja', module_info_file.name, 'unused kati stamp',
-          ['unused overlay'])
+          ['unused overlay'], [], debug_file)
       split_manifest = ET.parse(split_manifest_file.name)
       split_manifest_projects = [
           child.attrib['name']
@@ -288,8 +406,217 @@
               'platform/project4',
               # Manual inclusion from config file
               'platform/project6',
+              # From target_b (depends on target_f header library)
+              'platform/project7',
+              # Inclusion from the Kati makefile stamp
+              'vendor/project1',
           ])
 
+      with open(debug_file) as debug_fp:
+        debug_data = json.load(debug_fp)
+
+        # Dependency for droid, but no other adjacent modules
+        self.assertTrue(debug_data['platform/project1']['direct_input'])
+        self.assertFalse(debug_data['platform/project1']['adjacent_input'])
+        self.assertFalse(debug_data['platform/project1']['deps_input'])
+
+        # Dependency for droid and an adjacent module
+        self.assertTrue(debug_data['platform/project3']['direct_input'])
+        self.assertTrue(debug_data['platform/project3']['adjacent_input'])
+        self.assertFalse(debug_data['platform/project3']['deps_input'])
+
+        # Dependency only for an adjacent module
+        self.assertFalse(debug_data['platform/project4']['direct_input'])
+        self.assertTrue(debug_data['platform/project4']['adjacent_input'])
+        self.assertFalse(debug_data['platform/project4']['deps_input'])
+
+        # Included via header library
+        self.assertFalse(debug_data['platform/project7']['direct_input'])
+        self.assertFalse(debug_data['platform/project7']['adjacent_input'])
+        self.assertTrue(debug_data['platform/project7']['deps_input'])
+
+        # Included due to the config file
+        self.assertEqual(
+            debug_data['platform/project6']['manual_add_config'],
+            config_file.name)
+
+        # Included due to the Kati makefile stamp
+        self.assertEqual(debug_data['vendor/project1']['kati_makefiles'][0],
+                         product_makefile)
+
+  @mock.patch.object(manifest_split, 'get_ninja_inputs', autospec=True)
+  @mock.patch.object(manifest_split, 'get_kati_makefiles', autospec=True)
+  @mock.patch.object(manifest_split.ModuleInfo, '__init__', autospec=True)
+  def test_create_split_manifest_skip_kati_module_info(self, mock_init,
+                                                       mock_get_kati_makefiles,
+                                                       mock_get_ninja_inputs):
+    with tempfile.NamedTemporaryFile('w+t') as repo_list_file, \
+            tempfile.NamedTemporaryFile('w+t') as manifest_file, \
+            tempfile.NamedTemporaryFile('w+t') as module_info_file, \
+            tempfile.NamedTemporaryFile('w+t') as config_file, \
+            tempfile.NamedTemporaryFile('w+t') as split_manifest_file, \
+            tempfile.TemporaryDirectory() as temp_dir:
+
+      os.chdir(temp_dir)
+
+      manifest_file.write("""
+        <manifest>
+        </manifest>""")
+      manifest_file.flush()
+
+      manifest_split.create_split_manifest(
+          targets=['droid'],
+          manifest_file=manifest_file.name,
+          split_manifest_file=split_manifest_file.name,
+          config_files=[],
+          repo_list_file=repo_list_file.name,
+          ninja_build_file='build-target.ninja',
+          ninja_binary='ninja',
+          kati_stamp_file=None,
+          module_info_file=None,
+          overlays=[],
+          installed_prebuilts=[],
+          debug_file=None)
+
+    mock_get_ninja_inputs.assert_called_with(
+        'ninja', 'build-target.ninja', ['droid'])
+    mock_get_kati_makefiles.assert_not_called()
+    mock_init.assert_not_called()
+
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
+  def test_create_split_manifest_installed_prebuilt(self, mock_check_output):
+
+    # The purpose of this test is to verify that create_split_manifests treats
+    # installed prebuilts as projects, even though the installed prebuilts are
+    # not in the manifest. This use case occurs when installed prebuilts
+    # contribute modules to the build, but the installed prebuilts themselves
+    # aren't sourced from the manifest.
+
+    with tempfile.NamedTemporaryFile('w+t') as repo_list_file, \
+      tempfile.NamedTemporaryFile('w+t') as manifest_file, \
+      tempfile.NamedTemporaryFile('w+t') as module_info_file, \
+      tempfile.NamedTemporaryFile('w+t') as split_manifest_file, \
+      tempfile.TemporaryDirectory() as temp_dir:
+
+      os.chdir(temp_dir)
+
+      repo_list_file.write("""
+        system/project1 : platform/project1
+        vendor/project1 : vendor/project1""")
+      repo_list_file.flush()
+
+      # Here we have small manifest that does not include "prebuilt/project3"
+      # or "prebuilt/project4".
+
+      manifest_file.write("""
+        <manifest>
+          <project name="platform/project1" path="system/project1" />
+          <project name="vendor/project1" path="vendor/project1" />
+        </manifest>""")
+      manifest_file.flush()
+
+      # Here's the module_info.json file. It contains modules whose paths are
+      # "prebuilt/project3" and "prebult/project4", which are not found in the
+      # manifest. Normally create_split_manifest doesn't tolerate a path that
+      # doesn't correspond to a manifest project. However, this test verifies
+      # that you can use these modules if you tell create_split_manifest about
+      # the installed prebuilts via a parameter.
+
+      module_info_file.write("""{
+        "droid": { "class": ["EXECUTABLES"], "path": ["system/project1"], "dependencies": [] },
+        "target_a": { "class": ["EXECUTABLES"], "path": ["system/project1"], "dependencies": ["target_b", "target_c"] },
+        "target_b": { "class": ["SHARED_LIBRARIES"], "path": ["prebuilt/project3"], "dependencies": [] },
+        "target_c": { "class": ["SHARED_LIBRARIES"], "path": ["prebuilt/project4"], "dependencies": [] }
+      }""")
+      module_info_file.flush()
+
+      # droid needs inputs from project1
+      ninja_inputs_droid = b"""
+      system/project1/file1
+      """
+
+      # target_a needs inputs from prebuilt/project3 and prebuilt/project4
+      ninja_inputs_target_a = b"""
+      prebuilt/project3/file2
+      prebuilt/project4/file3
+      """
+
+      # target_b needs inputs from prebuilt/project3
+      ninja_inputs_target_b = b"""
+      prebuilt/project3/file4
+      """
+
+      # target_c needs inputs from prebuilt/project4
+      ninja_inputs_target_c = b"""
+      prebuilt/project4/file5
+      """
+
+      product_makefile = 'vendor/project1/product.mk'
+      os.makedirs(os.path.dirname(product_makefile))
+      os.mknod(product_makefile)
+      kati_stamp_dump = product_makefile.encode()
+
+      mock_check_output.side_effect = [
+          ninja_inputs_droid,
+          kati_stamp_dump,
+          ninja_inputs_target_a,
+          ninja_inputs_target_b,
+          ninja_inputs_target_c,
+      ]
+
+      debug_file = os.path.join(temp_dir, 'debug.json')
+
+      manifest_split.create_split_manifest(
+          targets=['droid'],
+          manifest_file=manifest_file.name,
+          split_manifest_file=split_manifest_file.name,
+          config_files=[],
+          repo_list_file=repo_list_file.name,
+          ninja_build_file='build-target.ninja',
+          ninja_binary='ninja',
+          module_info_file=module_info_file.name,
+          kati_stamp_file='unused kati stamp',
+          overlays=['unused overlay'],
+
+          # This is a key part of the test. Passing these two "projects" as
+          # prebuilts allows create_split_manifest to recognize them as
+          # projects even though they are not in the manifest.
+
+          installed_prebuilts=['prebuilt/project3', 'prebuilt/project4'],
+
+          debug_file = debug_file)
+
+      split_manifest = ET.parse(split_manifest_file.name)
+
+      split_manifest_projects = [
+          child.attrib['name']
+          for child in split_manifest.getroot().findall('project')
+      ]
+
+      # Note that the installed prebuilts do not appear in the final split
+      # manifest output because they were not in the manifest to begin with.
+
+      self.assertEqual(
+          split_manifest_projects,
+          [
+              # From droid
+              'platform/project1',
+              # Inclusion from the Kati makefile stamp
+              'vendor/project1',
+          ])
+
+      with open(debug_file) as debug_fp:
+        debug_data = json.load(debug_fp)
+
+        # Dependency for droid, but no other adjacent modules
+        self.assertTrue(debug_data['platform/project1']['direct_input'])
+        self.assertFalse(debug_data['platform/project1']['adjacent_input'])
+        self.assertFalse(debug_data['platform/project1']['deps_input'])
+
+        # Included due to the Kati makefile stamp
+        self.assertEqual(debug_data['vendor/project1']['kati_makefiles'][0],
+                         product_makefile)
+
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/split/sample_config.xml b/split/sample_config.xml
new file mode 100644
index 0000000..b9318ff
--- /dev/null
+++ b/split/sample_config.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8"?>
+<config>
+    <!-- treble_manifest_split config for aosp_arm64. -->
+
+    <!-- ============================================================= -->
+
+    <!-- Add projects that provide modules required for build success, but
+         are not detected automatically by the tool. -->
+
+    <!-- Lots of modules require py3-stdlib -->
+    <add_project name="platform/external/python/cpython3" />
+
+    <!-- bionic/libc:libc_jemalloc_wrapper requires libjemalloc5 -->
+    <add_project name="platform/external/jemalloc_new" />
+
+    <!-- libcore/ojluni:ojluni-phony requires art-notices-for-framework-stubs-jar -->
+    <add_project name="platform/prebuilts/module_sdk/art" />
+
+    <!-- Provides executable "as", needed by clang for some modules. -->
+    <add_project name="platform/prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9" />
+
+    <!-- ============================================================= -->
+
+    <!-- Remove some device projects that the tool included due to Kati makefile
+         scanning, but are not required to build aosp_arm64. -->
+    <remove_project name="device/generic/goldfish" />
+    <remove_project name="device/generic/goldfish-opengl" />
+    <remove_project name="device/google/atv" />
+    <remove_project name="device/google/trout" />
+    <remove_project name="device/ti/beagle-x15" />
+
+</config>
+
diff --git a/split/xml_diff.py b/split/xml_diff.py
new file mode 100644
index 0000000..81230a9
--- /dev/null
+++ b/split/xml_diff.py
@@ -0,0 +1,141 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""A library containing functions for diffing XML elements."""
+import textwrap
+from typing import Any, Callable, Dict, Set
+import xml.etree.ElementTree as ET
+import dataclasses
+
# Shorthand alias for the ElementTree element type used in annotations below.
Element = ET.Element

# One level of indentation used when pretty-printing ChangeMap contents.
_INDENT = (' ' * 2)
+
+
@dataclasses.dataclass
class Change:
  """A single value change, rendered as 'old -> new'.

  Attributes:
    value_from: the original value.
    value_to: the new value.
  """
  value_from: str
  value_to: str

  def __repr__(self):
    return '{0.value_from} -> {0.value_to}'.format(self)
+
+
@dataclasses.dataclass
class ChangeMap:
  """A collection of changes broken down by added, removed and modified.

  Attributes:
    added: Maps a string identifier to the added string.
    removed: Maps a string identifier to the removed string.
    modified: Maps a string identifier to the changed object.
  """
  added: Dict[str, str] = dataclasses.field(default_factory=dict)
  removed: Dict[str, str] = dataclasses.field(default_factory=dict)
  modified: Dict[str, Any] = dataclasses.field(default_factory=dict)

  def __repr__(self):
    # Build the report as a list of fragments and join once at the end.
    parts = []
    if self.added:
      parts.append('Added:\n')
      for value in self.added.values():
        parts.append(textwrap.indent(str(value) + '\n', _INDENT))
    if self.removed:
      parts.append('Removed:\n')
      for value in self.removed.values():
        parts.append(textwrap.indent(str(value) + '\n', _INDENT))
    if self.modified:
      parts.append('Modified:\n')
      for name, value in self.modified.items():
        parts.append(textwrap.indent(name + ':\n', _INDENT))
        # Modified entries are nested one level deeper than their name.
        parts.append(textwrap.indent(str(value) + '\n', _INDENT * 2))
    return ''.join(parts)

  def __bool__(self):
    # A ChangeMap is truthy when any of its three buckets is non-empty.
    return any((self.added, self.removed, self.modified))
+
+
def element_string(e: Element) -> str:
  """Serialize an XML element to a stripped UTF-8 string."""
  raw = ET.tostring(e)
  return raw.decode(encoding='UTF-8').strip()
+
+
def attribute_changes(e1: Element, e2: Element,
                      ignored_attrs: Set[str]) -> ChangeMap:
  """Get the changes in attributes between two XML elements.

  Arguments:
    e1: the first xml element.
    e2: the second xml element.
    ignored_attrs: a set of attribute names to ignore changes.

  Returns:
    A ChangeMap of attribute changes. Keyed by attribute name.
  """
  changes = ChangeMap()
  attributes = set(e1.keys()) | set(e2.keys())
  for attr in attributes:
    if attr in ignored_attrs:
      continue
    a1 = e1.get(attr)
    a2 = e2.get(attr)
    if a1 == a2:
      continue
    # Element.get returns None for a missing attribute, so compare with
    # `is None` rather than truthiness: an attribute explicitly set to the
    # empty string is present, and a ''-to-value transition must be reported
    # as a modification, not as an addition/removal.
    if a1 is None:
      changes.added[attr] = a2
    elif a2 is None:
      changes.removed[attr] = a1
    else:
      changes.modified[attr] = Change(value_from=a1, value_to=a2)
  return changes
+
+
def compare_subelements(
    tag: str,
    p1: Element,
    p2: Element,
    ignored_attrs: Set[str],
    key_fn: Callable[[Element], str],
    diff_fn: Callable[[Element, Element, Set[str]], Any]) -> ChangeMap:
  """Get the changes between subelements of two parent elements.

  Arguments:
    tag: tag name for children element.
    p1: the base parent xml element.
    p2: the parent xml element to compare
    ignored_attrs: a set of attribute names to ignore changes.
    key_fn: Function that takes a subelement and returns a key
    diff_fn: Function that take two subelements and a set of ignored
      attributes, returns the differences

  Returns:
    A ChangeMap object of the changes.
  """
  changes = ChangeMap()

  # Index the base parent's children by key; entries are popped as they are
  # matched, so whatever remains afterwards was removed.
  remaining = {key_fn(child): child for child in p1.findall(tag)}

  for child in p2.findall(tag):
    key = key_fn(child)
    if key not in remaining:
      changes.added[key] = element_string(child)
      continue
    counterpart = remaining.pop(key)
    diff = diff_fn(counterpart, child, ignored_attrs)
    if diff:
      changes.modified[key] = diff

  for name, child in remaining.items():
    changes.removed[name] = element_string(child)

  return changes
diff --git a/split/xml_diff_test.py b/split/xml_diff_test.py
new file mode 100644
index 0000000..aef4562
--- /dev/null
+++ b/split/xml_diff_test.py
@@ -0,0 +1,63 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Test XML diff."""
+import unittest
+import xml.etree.ElementTree as ET
+
+import xml_diff
+
+
class XmlDiffTest(unittest.TestCase):
  """Unit tests for the xml_diff helpers."""

  def test_attribute_changes(self):
    base = ET.fromstring('<node attr1="hello" attr2="hello2" ignored="me"/>')
    other = ET.fromstring('<node attr3="hello3" attr2="bye2"/>')
    result = xml_diff.attribute_changes(base, other, {'ignored'})
    expected = xml_diff.ChangeMap(
        added={'attr3': 'hello3'},
        removed={'attr1': 'hello'},
        modified={'attr2': xml_diff.Change('hello2', 'bye2')})
    self.assertEqual(expected, result)

  def test_compare_subelements(self):
    parent1 = ET.fromstring("""<parent>
      <tag1 attr="newfile2" attrkey="notneeded" />
      <tag1 attr="oldfile1" attrkey="dest1" />
      <tag2 attr="oldfile2" attrkey="dest2" />
    </parent>
    """)
    parent2 = ET.fromstring("""<parent>
      <tag1 attr="newfile1" attrkey="dest1" />
      <tag2 attr="newfile2" attrkey="dest2" />
      <tag2 attr="somefile" attrkey="addedfile" />
    </parent>
    """)

    result = xml_diff.compare_subelements(
        tag='tag1',
        p1=parent1,
        p2=parent2,
        ignored_attrs=set(),
        key_fn=lambda e: e.get('attrkey'),
        diff_fn=xml_diff.attribute_changes)

    # Only tag1 children are compared; tag2 children never participate.
    self.assertEqual({}, result.added)
    self.assertEqual(
        {'notneeded': '<tag1 attr="newfile2" attrkey="notneeded" />'},
        result.removed)
    self.assertEqual(
        {
            'dest1':
                xml_diff.ChangeMap(
                    modified={'attr': xml_diff.Change('oldfile1', 'newfile1')})
        }, result.modified)
diff --git a/vf/merge.sh b/vf/merge.sh
new file mode 100755
index 0000000..20ceb6b
--- /dev/null
+++ b/vf/merge.sh
@@ -0,0 +1,67 @@
#!/bin/bash

# Thin wrapper around merge_target_files for vendor-frozen targets to
# allow flag changes to be made in a presubmit-guarded change.
#
# Arguments:
#   -t  lunch target name (required)
#   -d  dist dir holding the system build artifacts (required)
#   -v  dir holding the vendor build artifacts (required)
#   -b  build id used to name the merged output files (required)
#   -m  dir holding the merge config item lists (required)

set -e

while getopts ":t:d:v:b:m:" option ; do
  case "${option}" in
    t) TARGET=${OPTARG} ;;
    d) DIST_DIR=${OPTARG} ;;
    v) VENDOR_DIR=${OPTARG} ;;
    b) BUILD_ID=${OPTARG} ;;
    # TODO(b/170638547) Remove the need for merge configs.
    m) MERGE_CONFIG_DIR=${OPTARG} ;;
    *) echo "Unexpected argument: -${OPTARG}" >&2 ;;
  esac
done

if [[ -z "${TARGET}" ]]; then
  echo "error: -t target argument not set"
  exit 1
fi
if [[ -z "${DIST_DIR}" ]]; then
  echo "error: -d dist dir argument not set"
  exit 1
fi
if [[ -z "${VENDOR_DIR}" ]]; then
  echo "error: -v vendor dir argument not set"
  exit 1
fi
if [[ -z "${BUILD_ID}" ]]; then
  echo "error: -b build id argument not set"
  exit 1
fi
if [[ -z "${MERGE_CONFIG_DIR}" ]]; then
  echo "error: -m merge config dir argument not set"
  exit 1
fi

# Move the system-only build artifacts to a separate folder
# so that the flashing tools use the merged files instead.
# Variable expansions are quoted so paths containing spaces survive;
# globs are deliberately left outside the quotes so they still expand.
readonly SYSTEM_DIR="${DIST_DIR}/system_build"
mkdir -p "${SYSTEM_DIR}"
mv -f "${DIST_DIR}/android-info.txt" "${SYSTEM_DIR}"
mv -f "${DIST_DIR}/${TARGET}"-*.zip "${SYSTEM_DIR}"

# NOTE(review): assumes the script runs from the top of an Android source
# tree where build/envsetup.sh provides `lunch` — confirm with callers.
source build/envsetup.sh
lunch "${TARGET}-userdebug"

out/host/linux-x86/bin/merge_target_files \
  --framework-target-files "${SYSTEM_DIR}/${TARGET}"-target_files*.zip \
  --vendor-target-files "${VENDOR_DIR}"/*-target_files-*.zip \
  --framework-item-list "${MERGE_CONFIG_DIR}/framework_item_list.txt" \
  --framework-misc-info-keys "${MERGE_CONFIG_DIR}/framework_misc_info_keys.txt" \
  --vendor-item-list "${MERGE_CONFIG_DIR}/vendor_item_list.txt" \
  --allow-duplicate-apkapex-keys \
  --output-target-files "${DIST_DIR}/${TARGET}-target_files-${BUILD_ID}.zip" \
  --output-img  "${DIST_DIR}/${TARGET}-img-${BUILD_ID}.zip" \
  --output-ota  "${DIST_DIR}/${TARGET}-ota-${BUILD_ID}.zip"

# Copy bootloader.img, radio.img, and android-info.txt, needed for flashing.
cp "${VENDOR_DIR}/bootloader.img" "${DIST_DIR}/bootloader.img"
cp "${VENDOR_DIR}/radio.img" "${DIST_DIR}/radio.img"
unzip -j -d "${DIST_DIR}" \
  "${VENDOR_DIR}"/*-target_files-*.zip \
  OTA/android-info.txt