Snap for 8730993 from dc4c38f9f2586ec3dc0ccd611caa589b6452603e to mainline-tzdata3-release

Change-Id: I8bfe67e988418375cf11d79e7a8ce8d7e4c2c025
diff --git a/build/sandbox/build_android_sandboxed.py b/build/sandbox/build_android_sandboxed.py
index 8518074..f6a1b57 100644
--- a/build/sandbox/build_android_sandboxed.py
+++ b/build/sandbox/build_android_sandboxed.py
@@ -23,19 +23,10 @@
   '/src/tools/treble/build/sandbox/build_android_target.sh'
 
 
-def build(build_target,
-          variant,
-          nsjail_bin,
-          chroot,
-          dist_dir,
-          build_id,
-          max_cpus,
-          build_goals,
-          config_file=None,
-          command_wrapper=_DEFAULT_COMMAND_WRAPPER,
-          use_rbe=False,
-          readonly_bind_mounts=[],
-          env=[]):
+def build(build_target, variant, nsjail_bin, chroot, dist_dir, build_id,
+          max_cpus, build_goals, config_file=None,
+          command_wrapper=_DEFAULT_COMMAND_WRAPPER, use_rbe=False,
+          readonly_bind_mount=None, env=[]):
   """Builds an Android target in a secure sandbox.
 
   Args:
@@ -51,9 +42,9 @@
     config_file: A string path to an overlay configuration file.
     command_wrapper: A string path to the command wrapper.
     use_rbe: If true, will attempt to use RBE for the build.
-    readonly_bind_mounts: A list of string paths to be mounted as read-only.
-    env: An array of environment variables to define in the NsJail sandbox in
-      the `var=val` syntax.
+    readonly_bind_mount: A string path to be mounted as read-only.
+    env: An array of environment variables to define in the NsJail sandbox in the
+      `var=val` syntax.
 
   Returns:
     A list of commands that were executed. Each command is a list of strings.
@@ -62,8 +53,7 @@
     cfg = config.Config(config_file)
     android_target = cfg.get_build_config_android_target(build_target)
     if cfg.has_tag(build_target, 'skip'):
-      print('Warning: skipping build_target "{}" due to tag being set'.format(
-          build_target))
+      print('Warning: skipping build_target "{}" due to tag being set'.format(build_target))
       return []
   else:
     android_target = build_target
@@ -79,6 +69,10 @@
       '-j',
   ] + build_goals
 
+  readonly_bind_mounts = []
+  if readonly_bind_mount:
+    readonly_bind_mounts = [readonly_bind_mount]
+
   extra_nsjail_args = []
   cleanup = lambda: None
   nsjail_wrapper = []
@@ -114,11 +108,15 @@
   # Use the top level module docstring for the help description
   parser = argparse.ArgumentParser(
       description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
-  parser.add_argument('--build_target', help='The build target.')
+  parser.add_argument(
+      '--build_target',
+      help='The build target.')
   parser.add_argument(
       '--variant', default='userdebug', help='The Android build variant.')
   parser.add_argument(
-      '--nsjail_bin', required=True, help='Path to NsJail binary.')
+      '--nsjail_bin',
+      required=True,
+      help='Path to NsJail binary.')
   parser.add_argument(
       '--chroot',
       required=True,
@@ -133,17 +131,13 @@
       '--command_wrapper',
       default=_DEFAULT_COMMAND_WRAPPER,
       help='Path to the command wrapper. '
-      'Defaults to \'%s\'.' % _DEFAULT_COMMAND_WRAPPER)
+        'Defaults to \'%s\'.' % _DEFAULT_COMMAND_WRAPPER)
   parser.add_argument(
       '--readonly_bind_mount',
-      type=str,
-      default=[],
-      action='append',
       help='Path to a path to be mounted as readonly inside the secure '
-      'build sandbox. Can be specified multiple times')
+      'build sandbox.')
   parser.add_argument(
-      '--env',
-      '-e',
+      '--env', '-e',
       type=str,
       default=[],
       action='append',
@@ -169,7 +163,9 @@
       help='One or more contexts used to select build goals from the '
       'configuration.')
   parser.add_argument(
-      '--use_rbe', action='store_true', help='Executes the build on RBE')
+      '--use_rbe',
+      action='store_true',
+      help='Executes the build on RBE')
   return parser
 
 
@@ -195,7 +191,6 @@
 
   cfg = config.Config(args['config_file'])
   build_goals = cfg.get_build_goals(args['build_target'], set(args['context']))
-  build_flags = cfg.get_build_flags(args['build_target'], set(args['context']))
 
   build(
       build_target=args['build_target'],
@@ -204,13 +199,13 @@
       chroot=args['chroot'],
       config_file=args['config_file'],
       command_wrapper=args['command_wrapper'],
-      readonly_bind_mounts=args['readonly_bind_mount'],
+      readonly_bind_mount=args['readonly_bind_mount'],
       env=args['env'],
       dist_dir=args['dist_dir'],
       build_id=args['build_id'],
       max_cpus=args['max_cpus'],
       use_rbe=args['use_rbe'],
-      build_goals=build_goals + build_flags)
+      build_goals=build_goals)
 
 
 if __name__ == '__main__':
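Note on the --readonly_bind_mount revert above: with argparse's default store action the flag now carries a single string (the last occurrence wins), whereas the removed action='append' form accumulated every occurrence into a list. A minimal, self-contained sketch of that difference; the parser names and paths are illustrative, not part of the tool:

import argparse

# action='append' collects repeated flags into a list (the removed behaviour).
append_parser = argparse.ArgumentParser()
append_parser.add_argument('--readonly_bind_mount', type=str, default=[], action='append')
args = append_parser.parse_args(
    ['--readonly_bind_mount', '/ro/a', '--readonly_bind_mount', '/ro/b'])
print(args.readonly_bind_mount)  # ['/ro/a', '/ro/b']

# The plain store action keeps only the last value (the reverted-to behaviour).
single_parser = argparse.ArgumentParser()
single_parser.add_argument('--readonly_bind_mount')
args = single_parser.parse_args(
    ['--readonly_bind_mount', '/ro/a', '--readonly_bind_mount', '/ro/b'])
print(args.readonly_bind_mount)  # /ro/b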
diff --git a/build/sandbox/build_android_target.sh b/build/sandbox/build_android_target.sh
index 266bb71..23e05ea 100755
--- a/build/sandbox/build_android_target.sh
+++ b/build/sandbox/build_android_target.sh
@@ -40,16 +40,6 @@
 
 set -e
 
-BUILD_COMMAND_ARRAY=($BUILD_COMMAND)
-for i in ${BUILD_COMMAND_ARRAY[@]};
-do
-  if [[ $i =~ ^[A-Z_][A-Z0-9_]*= ]];
-  then
-    echo "build_android_target.sh: export $i";
-    export $i;
-  fi;
-done;
-
 echo "build_android_target.sh: source build/envsetup.sh"
 source build/envsetup.sh
 echo "build_android_target.sh: lunch $ANDROID_TARGET"
@@ -64,7 +54,7 @@
 set +e
 
 echo "build_android_target.sh: $BUILD_COMMAND"
-eval $BUILD_COMMAND
+$BUILD_COMMAND
 BUILD_COMMAND_EXIT_VALUE=$?
 
 # Collect RBE metrics if enabled
diff --git a/build/sandbox/config.py b/build/sandbox/config.py
index a1be52a..26bccbd 100644
--- a/build/sandbox/config.py
+++ b/build/sandbox/config.py
@@ -11,6 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 """Parses config file and provides various ways of using it."""
 
 import xml.etree.ElementTree as ET
@@ -106,7 +107,6 @@
 
 Overlay = collections.namedtuple('Overlay', ['name', 'replacement_paths'])
 
-
 class BuildConfig(object):
   """Represents configuration of a build_target.
 
@@ -116,8 +116,8 @@
     tags: List of tags associated with the build target config
     build_goals: List of goals to be used while building the target.
     overlays: List of overlays to be mounted.
-    views: A list of (source, destination) string path tuple to be mounted. See
-      view nodes in XML.
+    views: A list of (source, destination) string path tuple to be mounted.
+      See view nodes in XML.
     allow_readwrite_all: If true, mount source tree as rw.
     allow_readwrite: List of directories to be mounted as rw.
     allowed_projects_file: a string path name of a file with a containing
@@ -130,7 +130,6 @@
                android_target,
                tags=frozenset(),
                build_goals=(),
-               build_flags=(),
                overlays=(),
                views=(),
                allow_readwrite_all=False,
@@ -142,7 +141,6 @@
     self.android_target = android_target
     self.tags = tags
     self.build_goals = list(build_goals)
-    self.build_flags = list(build_flags)
     self.overlays = list(overlays)
     self.views = list(views)
     self.allow_readwrite_all = allow_readwrite_all
@@ -163,7 +161,8 @@
 
   @classmethod
   def from_config(cls, config_elem, fs_view_map, base_config=None):
-    """Creates a BuildConfig from a config XML element and an optional base_config.
+    """Creates a BuildConfig from a config XML element and an optional
+      base_config.
 
     Args:
       config_elem: the config XML node element to build the configuration
@@ -189,8 +188,6 @@
             'allowed_projects_file', base_config.allowed_projects_file),
         build_goals=_get_build_config_goals(config_elem,
                                             base_config.build_goals),
-        build_flags=_get_build_config_flags(config_elem,
-                                            base_config.build_flags),
         tags=_get_config_tags(config_elem, base_config.tags),
         overlays=_get_overlays(config_elem, base_config.overlays),
         allow_readwrite=_get_allow_readwrite(config_elem,
@@ -199,7 +196,8 @@
         allow_readwrite_all=_get_allowed_readwrite_all(
             config_elem, base_config.allow_readwrite_all),
         configurations=_get_configurations(config_elem,
-                                           base_config.configurations))
+                                           base_config.configurations)
+    )
 
 
 def _get_configurations(config_elem, base):
@@ -228,13 +226,6 @@
                  for goal in config_elem.findall('goal')]
 
 
-def _get_build_config_flags(config_elem, base=None):
-  """See _get_build_config_goals. Gets 'flag' instead of 'goal'."""
-  return base + [(goal.get('name'), set(goal.get('contexts').split(','))
-                  if goal.get('contexts') else None)
-                 for goal in config_elem.findall('flag')]
-
-
 def _get_config_tags(config_elem, base=frozenset()):
   """Retrieves tags from build_config or target.
 
@@ -250,12 +241,13 @@
 
 
 def _get_allowed_readwrite_all(config_elem, default=False):
-  """Determines if build_config or target is set to allow readwrite for all source paths.
+  """Determines if build_config or target is set to allow readwrite for all
+    source paths.
 
   Args:
     config_elem: A build_config or target xml element.
-    default: Value to use if element doesn't contain the allow_readwrite_all
-      attribute.
+    default: Value to use if element doesn't contain the
+      allow_readwrite_all attribute.
 
   Returns:
     True if build config is set to allow readwrite for all source paths
@@ -272,8 +264,7 @@
     base: Initial list of overlays to prepend to the list
 
   Returns:
-    A list of tuples of overlays and replacement paths to mount for a
-    build_config or target.
+    A list of tuples of overlays and replacement paths to mount for a build_config or target.
   """
   overlays = []
   for overlay in config_elem.findall('overlay'):
@@ -285,7 +276,6 @@
             ])))
   return base + overlays
 
-
 def _get_views(config_elem, fs_view_map, base=None):
   """Retrieves list of views from build_config or target.
 
@@ -297,14 +287,13 @@
     A list of (source, destination) string path tuple to be mounted. See view
       nodes in XML.
   """
-  return base + [
-      fs for o in config_elem.findall('view')
-      for fs in fs_view_map[o.get('name')]
-  ]
+  return base + [fs for o in config_elem.findall('view')
+                 for fs in fs_view_map[o.get('name')]]
 
 
 def _get_allow_readwrite(config_elem, base=None):
-  """Retrieves list of directories to be mounted rw from build_config or target.
+  """Retrieves list of directories to be mounted rw from build_config or
+    target.
 
   Args:
     config_elem: A build_config or target xml element.
@@ -461,18 +450,6 @@
 
     return build_goals
 
-  def get_build_flags(self, build_target, contexts=frozenset()):
-    """See get_build_goals. Gets flags instead of goals."""
-    build_flags = []
-    for flag, build_contexts in self._build_config_map[
-        build_target].build_flags:
-      if not build_contexts:
-        build_flags.append(flag)
-      elif build_contexts.intersection(contexts):
-        build_flags.append(flag)
-
-    return build_flags
-
   def get_rw_allowlist_map(self):
     """Return read-write allowlist map.
 
@@ -501,18 +478,19 @@
       overlay names corresponding to the target.
     """
     return {
-        b.name: [o.name for o in b.overlays
-                ] for b in self._build_config_map.values()
+        b.name : [o.name for o in b.overlays
+                 ] for b in self._build_config_map.values()
     }
 
+
   def get_fs_view_map(self):
     """Return the filesystem view map.
-
     Returns:
       A dict of filesystem views keyed by target name. A filesystem view is a
       list of (source, destination) string path tuples.
     """
-    return {b.name: b.views for b in self._build_config_map.values()}
+    return {b.name : b.views for b in self._build_config_map.values()}
+
 
   def get_build_config(self, build_target):
     return self._build_config_map[build_target]
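For context on the goal selection that remains after get_build_flags is removed: get_build_goals keeps a goal whose contexts attribute is empty and otherwise keeps it only when the goal's contexts intersect the requested ones. A minimal sketch of that selection rule, with illustrative names rather than the module's real data structures:

def select_goals(goal_context_pairs, requested_contexts=frozenset()):
  """Selects goals the way Config.get_build_goals does.

  goal_context_pairs: list of (goal_name, contexts) pairs, where contexts is a
    set of context names, or None when the goal is unconditional.
  """
  selected = []
  for goal, goal_contexts in goal_context_pairs:
    if not goal_contexts:
      selected.append(goal)          # no contexts: always selected
    elif goal_contexts.intersection(requested_contexts):
      selected.append(goal)          # selected when any context matches
  return selected

# Mirrors the context test expectations in config_test.py below:
print(select_goals([('droid', None), ('always', None), ('dist', {'ci'}),
                    ('VAR=value', {'x'}), ('extra_goal', {'ci', 'x'})], {'x'}))
# ['droid', 'always', 'VAR=value', 'extra_goal']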
diff --git a/build/sandbox/config_test.py b/build/sandbox/config_test.py
index 139b5f4..002c625 100644
--- a/build/sandbox/config_test.py
+++ b/build/sandbox/config_test.py
@@ -21,7 +21,7 @@
   <target name="android_target_1">
     <build_config>
       <goal name="droid"/>
-      <flag name="dist"/>
+      <goal name="dist"/>
     </build_config>
   </target>
   <target name="android_target_2" tags="cool,hot">
@@ -30,14 +30,14 @@
     <goal name="common_goal"/>
     <build_config tags="warm">
       <goal name="droid"/>
-      <flag name="dist"/>
+      <goal name="dist"/>
       <goal name="goal_for_android_target_2"/>
     </build_config>
     <build_config name="build_target_2" tags="dry">
       <config name="fmc_framework_images" value="bt1,bt2"/>
       <config name="fmc_misc_info_keys" value="misc_info_keys_2.txt"/>
       <goal name="droid"/>
-      <flag name="VAR=a"/>
+      <goal name="VAR=a"/>
     </build_config>
   </target>
   <target name="android_target_3" tags="">
@@ -67,10 +67,10 @@
       <goal name="always" contexts=""/>
 
       <!-- selected if ci context requested -->
-      <flag name="dist" contexts="ci"/>
+      <goal name="dist" contexts="ci"/>
 
       <!-- selected if x context requested -->
-      <flag name="VAR=value" contexts="x"/>
+      <goal name="VAR=value" contexts="x"/>
 
       <!-- selected if ci or x context requested -->
       <goal name="extra_goal" contexts="ci,x"/>
@@ -80,7 +80,6 @@
 </config>
 """
 
-
 class ConfigTest(unittest.TestCase):
   """unittest for Config."""
 
@@ -160,28 +159,32 @@
           cfg.get_build_config_android_target('some_target'),
           'android_target_4')
 
-  def testBuildTargetToBuildGoalsAndFlags(self):
+  def testBuildTargetToBuildGoals(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
       test_config.write(_TEST_CONFIG_XML)
       test_config.flush()
       cfg = config.factory(test_config.name)
 
-      self.assertEqual(cfg.get_build_goals('android_target_1'), ['droid'])
-      self.assertEqual(cfg.get_build_flags('android_target_1'), ['dist'])
+      # Test that build_target android_target_1 has goals droid and dist.
+      self.assertEqual(
+          cfg.get_build_goals('android_target_1'),
+          ['droid', 'dist'])
 
+      # Test that build_target android_target_2 has goals droid, dist, and
+      # goal_for_android_target_2.
       self.assertEqual(
           cfg.get_build_goals('android_target_2'),
-          ['common_goal', 'droid', 'goal_for_android_target_2'])
-      self.assertEqual(cfg.get_build_flags('android_target_2'), ['dist'])
+          ['common_goal', 'droid', 'dist', 'goal_for_android_target_2'])
 
+      # Test that build_target build_target_2 has goals droid and VAR=a.
       self.assertEqual(
-          cfg.get_build_goals('build_target_2'), ['common_goal', 'droid'])
-      self.assertEqual(cfg.get_build_flags('build_target_2'), ['VAR=a'])
+          cfg.get_build_goals('build_target_2'),
+          ['common_goal', 'droid', 'VAR=a'])
 
       # Test empty goals
-      self.assertEqual(cfg.get_build_goals('no_goals_target'), [])
+      self.assertEqual(cfg.get_build_goals('no_goals_target'),[])
 
-  def testBuildTargetToBuildGoalsAndFlagsWithContexts(self):
+  def testBuildTargetToBuildGoalsWithContexts(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
       test_config.write(_TEST_CONTEXTS_CONFIG_XML)
       test_config.flush()
@@ -209,19 +212,19 @@
       # the x goals.
 
       build_goals = cfg.get_build_goals('test_target', set(['x']))
-      build_flags = cfg.get_build_flags('test_target', set(['x']))
 
-      self.assertEqual(build_goals, ['droid', 'always', 'extra_goal'])
-      self.assertEqual(build_flags, ['VAR=value'])
+      self.assertEqual(
+          build_goals,
+          ['droid', 'always', 'VAR=value', 'extra_goal'])
 
       # Test that when requested_contexts is set(['ci', 'x']), we select the
       # "always" goals, the ci goals, and the x goals.
 
       build_goals = cfg.get_build_goals('test_target', set(['ci', 'x']))
-      build_flags = cfg.get_build_flags('test_target', set(['ci', 'x']))
 
-      self.assertEqual(build_goals, ['droid', 'always', 'extra_goal'])
-      self.assertEqual(build_flags, ['dist', 'VAR=value'])
+      self.assertEqual(
+          build_goals,
+          ['droid', 'always', 'dist', 'VAR=value', 'extra_goal'])
 
   def testAllowReadWriteAll(self):
     with tempfile.NamedTemporaryFile('w+t') as test_config:
@@ -289,19 +292,16 @@
       cfg = config.factory(test_config.name)
 
       bc_at2 = cfg.get_build_config('android_target_2')
-      self.assertDictEqual(
-          bc_at2.configurations, {
-              'fmc_framework_images': 'image1,image2',
-              'fmc_misc_info_keys': 'misc_info_keys.txt'
-          })
+      self.assertDictEqual(bc_at2.configurations, {
+        'fmc_framework_images': 'image1,image2',
+        'fmc_misc_info_keys': 'misc_info_keys.txt'
+      })
 
       bc_bt2 = cfg.get_build_config('build_target_2')
-      self.assertDictEqual(
-          bc_bt2.configurations, {
-              'fmc_framework_images': 'bt1,bt2',
-              'fmc_misc_info_keys': 'misc_info_keys_2.txt'
-          })
-
+      self.assertDictEqual(bc_bt2.configurations, {
+        'fmc_framework_images': 'bt1,bt2',
+        'fmc_misc_info_keys': 'misc_info_keys_2.txt'
+      })
 
 if __name__ == '__main__':
   unittest.main()
diff --git a/build/sandbox/nsjail.py b/build/sandbox/nsjail.py
index 4d23040..c388d0b 100644
--- a/build/sandbox/nsjail.py
+++ b/build/sandbox/nsjail.py
@@ -351,17 +351,7 @@
     print(' '.join(nsjail_command), file=stdout)
 
   if not dry_run:
-    try:
-      subprocess.check_call(nsjail_command, stdout=stdout, stderr=stderr)
-    except subprocess.CalledProcessError as error:
-      if len(error.cmd) > 13:
-        cmd = error.cmd[:6] + ['...elided...'] + error.cmd[-6:]
-      else:
-        cmd = error.cmd
-      msg = 'nsjail command %s failed with return code %d' % (cmd, error.returncode)
-      # Raise from None to avoid exception chaining.
-      raise RuntimeError(msg) from None
-
+    subprocess.check_call(nsjail_command, stdout=stdout, stderr=stderr)
 
 def parse_args():
   """Parse command line arguments.
diff --git a/build/sandbox/nsjail_test.py b/build/sandbox/nsjail_test.py
index 8ea93ef..a73bbdb 100644
--- a/build/sandbox/nsjail_test.py
+++ b/build/sandbox/nsjail_test.py
@@ -75,7 +75,7 @@
       self.assertEqual(stdout, expected)
 
   def testFailingJailedCommand(self):
-    with self.assertRaises(RuntimeError):
+    with self.assertRaises(subprocess.CalledProcessError):
       nsjail.run(
           nsjail_bin='/bin/false',
           chroot='/chroot',
diff --git a/build/sandbox/rbe.py b/build/sandbox/rbe.py
index 6d959b8..fba368f 100644
--- a/build/sandbox/rbe.py
+++ b/build/sandbox/rbe.py
@@ -40,10 +40,6 @@
     'RBE_JAVAC': 'true',
     'RBE_D8': 'true',
     'RBE_R8': 'true',
-    'RBE_CXX_EXEC_STRATEGY' : 'racing',
-    'RBE_JAVAC_EXEC_STRATEGY' : 'racing',
-    'RBE_R8_EXEC_STRATEGY' : 'racing',
-    'RBE_D8_EXEC_STRATEGY' : 'racing',
 }
 
 
diff --git a/build/sandbox/sample_config.xml b/build/sandbox/sample_config.xml
index 3c75217..dbbd412 100644
--- a/build/sandbox/sample_config.xml
+++ b/build/sandbox/sample_config.xml
@@ -3,19 +3,19 @@
 Defines sample build configuration file.
 -->
 <config>
-  <target name="aosp_cf_x86_64_phone_default" android_target="aosp_cf_x86_64_phone"
+  <target name="aosp_cf_x86_phone_default" android_target="aosp_cf_x86_phone" 
       allow_readwrite_all="true">
     <!-- Target elements can define elements and attributes that are inherited
          by build_config child elements. -->
     <goal name="droid"/>
     <build_config>
-      <!-- build_config name will inherit the name aosp_cf_x86_64_phone_default
-           and append dist to the flag list. -->
-      <flag name="dist"/>
+      <!-- build_config name will inherit the name aosp_cf_x86_phone_default
+           and append dist to the goal list. -->
+      <goal name="dist"/>
     </build_config>
-    <build_config name="aosp_cf_x86_64_phone_no_dist" tags="skip">
+    <build_config name="aosp_cf_x86_phone_no_dist" tags="skip">
     </build_config>
-    <build_config name="aosp_cf_x86_64_phone_ro" allow_readwrite_all="false">
+    <build_config name="aosp_cf_x86_phone_ro" allow_readwrite_all="false">
       <!-- This build_config will override allow_readwrite_all attribute. -->
     </build_config>
     <build_config name="aosp_cf_x86_tv" android_target="aosp_cf_x86_tv">
@@ -26,7 +26,7 @@
     <!-- If android_target isn't provided target will use name as default
          android_target. -->
     <goal name="droid"/>
-    <flag name="dist"/>
+    <goal name="dist"/>
     <build_config>
       <!-- build_config will inherit the name and android_target: 
            aosp_car_arm64. -->
diff --git a/fetcher/fetcher_lib.py b/fetcher/fetcher_lib.py
index 9701494..0ec0173 100644
--- a/fetcher/fetcher_lib.py
+++ b/fetcher/fetcher_lib.py
@@ -103,8 +103,7 @@
   Returns:
     An authorized android build api client.
   """
-  return build(serviceName='androidbuildinternal', version='v2beta1', http=http,
-               static_discovery=False)
+  return build(serviceName='androidbuildinternal', version='v2beta1', http=http)
 
 
 def create_client_from_json_keyfile(json_keyfile_name=None):
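The static_discovery keyword dropped above exists only in newer google-api-python-client releases; on an older client, build() has no such parameter and the call raises TypeError. A hypothetical compatibility shim, not part of this patch, that tolerates either client version could look like:

from googleapiclient.discovery import build

def _build_android_internal_client(http):
  # Prefer static discovery when the installed client supports the keyword;
  # fall back to the plain call on clients whose build() does not.
  try:
    return build(serviceName='androidbuildinternal', version='v2beta1',
                 http=http, static_discovery=False)
  except TypeError:
    return build(serviceName='androidbuildinternal', version='v2beta1', http=http)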
diff --git a/gki/Android.bp b/gki/Android.bp
deleted file mode 100644
index d5b886d..0000000
--- a/gki/Android.bp
+++ /dev/null
@@ -1,50 +0,0 @@
-package {
-    // See: http://go/android-license-faq
-    // A large-scale-change added 'default_applicable_licenses' to import
-    // all of the 'license_kinds' from "tools_treble_license"
-    // to get the below license kinds:
-    //   SPDX-license-identifier-Apache-2.0
-    default_applicable_licenses: ["tools_treble_license"],
-}
-
-python_defaults {
-  name: "repack_gki_defaults",
-  version: {
-      py2: {
-          enabled: false,
-          embedded_launcher: false,
-      },
-      py3: {
-          enabled: true,
-          embedded_launcher: false,
-      },
-  },
-}
-
-python_library_host {
-  name: "repack_gki_lib",
-  defaults: ["repack_gki_defaults"],
-  srcs: [
-    "repack_gki_lib.py",
-  ],
-  libs: [
-    "fetcher-lib",
-  ],
-  pkg_path: "treble/gki",
-}
-
-python_binary_host {
-    name: "repack_gki",
-    main: "repack_gki.py",
-    defaults: ["repack_gki_defaults"],
-    srcs: [
-        "repack_gki.py",
-    ],
-    libs: [
-        "repack_gki_lib",
-    ],
-    required: [
-        "mkbootimg",
-        "unpack_bootimg",
-    ],
-}
diff --git a/gki/repack_gki.py b/gki/repack_gki.py
deleted file mode 100644
index 90b632e..0000000
--- a/gki/repack_gki.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""Repacks GKI boot images with the given kernel images."""
-import argparse
-import json
-import os
-import shutil
-import tempfile
-
-from treble.fetcher import fetcher_lib
-from treble.gki import repack_gki_lib
-
-
-def main():
-  parser = argparse.ArgumentParser(
-      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
-  parser.add_argument(
-      '--json_keyfile',
-      help='JSON keyfile containing credentials. '
-      '(Default: Use default credential file)')
-  parser.add_argument(
-      '--ramdisk_build_id',
-      required=True,
-      help='Download from the specified build.')
-  parser.add_argument(
-      '--ramdisk_target',
-      required=True,
-      help='Name of the ramdisk target from the ramdisk branch.')
-  parser.add_argument(
-      '--kernel_build_id',
-      required=True,
-      help='Download from the specified build.')
-  parser.add_argument(
-      '--kernel_target',
-      required=True,
-      help='Name of the kernel target from the kernel branch.')
-  parser.add_argument(
-      '--kernel_debug_target',
-      required=True,
-      help='Name of the kernel debug target from the kernel branch.')
-  parser.add_argument(
-      '--kernel_version',
-      required=True,
-      help='The Kernel version to use when repacking.')
-  parser.add_argument(
-      '--out_dir', required=True, help='Save output to this directory.')
-
-  args = parser.parse_args()
-  client = fetcher_lib.create_client_from_json_keyfile(
-      json_keyfile_name=args.json_keyfile)
-
-  if not os.path.exists(args.out_dir):
-    os.makedirs(args.out_dir)
-
-  with tempfile.TemporaryDirectory() as tmp_bootimg_dir, \
-      tempfile.TemporaryDirectory() as tmp_kernel_dir:
-    # Fetch boot images.
-    repack_gki_lib.fetch_bootimg(
-        client=client,
-        out_dir=tmp_bootimg_dir,
-        build_id=args.ramdisk_build_id,
-        kernel_version=args.kernel_version,
-        target=args.ramdisk_target,
-    )
-
-    # Fetch kernel artifacts.
-    kernel_dir, kernel_debug_dir = repack_gki_lib.fetch_kernel(
-        client=client,
-        out_dir=tmp_kernel_dir,
-        build_id=args.kernel_build_id,
-        kernel_target=args.kernel_target,
-        kernel_debug_target=args.kernel_debug_target,
-    )
-
-    # Save kernel artifacts to the out dir.
-    kernel_out_dir = os.path.join(args.out_dir, 'kernel', args.kernel_version)
-    if not os.path.exists(kernel_out_dir):
-      os.makedirs(kernel_out_dir)
-
-    def copy_kernel_file(in_dir, filename, outname=None):
-      if not outname:
-        outname = filename
-      shutil.copy(
-          os.path.join(in_dir, filename), os.path.join(kernel_out_dir, outname))
-
-    copy_kernel_file(kernel_dir, 'System.map')
-    copy_kernel_file(kernel_dir, 'abi_symbollist')
-    copy_kernel_file(kernel_dir, 'vmlinux')
-    copy_kernel_file(kernel_dir, 'Image',
-                     'kernel-{}'.format(args.kernel_version))
-    copy_kernel_file(kernel_dir, 'Image.lz4',
-                     'kernel-{}-lz4'.format(args.kernel_version))
-    copy_kernel_file(kernel_dir, 'Image.gz',
-                     'kernel-{}-gz'.format(args.kernel_version))
-    copy_kernel_file(kernel_debug_dir, 'System.map', 'System.map-allsyms')
-    copy_kernel_file(kernel_debug_dir, 'abi-generated.xml')
-    copy_kernel_file(kernel_debug_dir, 'abi-full-generated.xml')
-    copy_kernel_file(kernel_debug_dir, 'Image',
-                     'kernel-{}-allsyms'.format(args.kernel_version))
-    copy_kernel_file(kernel_debug_dir, 'Image.lz4',
-                     'kernel-{}-lz4-allsyms'.format(args.kernel_version))
-    copy_kernel_file(kernel_debug_dir, 'Image.gz',
-                     'kernel-{}-gz-allsyms'.format(args.kernel_version))
-
-    # Repack individual boot images using the fetched kernel artifacts,
-    # then save to the out dir.
-    repack_gki_lib.repack_bootimgs(tmp_bootimg_dir, kernel_dir,
-                                   kernel_debug_dir)
-    shutil.copytree(tmp_bootimg_dir, args.out_dir, dirs_exist_ok=True)
-
-    # Repack boot images inside the img.zip and save to the out dir.
-    img_zip_name = [f for f in os.listdir(tmp_bootimg_dir) if '-img-' in f][0]
-    img_zip_path = os.path.join(tmp_bootimg_dir, img_zip_name)
-    repack_gki_lib.repack_img_zip(img_zip_path, kernel_dir, kernel_debug_dir,
-                                  args.kernel_version)
-    shutil.copy(img_zip_path, args.out_dir)
-
-    # Replace kernels within the target_files.zip and save to the out dir.
-    # TODO(b/209035444): GSI target_files does not yet include a 5.15 boot.img.
-    if args.kernel_version != '5.15':
-      target_files_zip_name = [
-          f for f in os.listdir(tmp_bootimg_dir) if '-target_files-' in f
-      ][0]
-      target_files_zip_path = os.path.join(tmp_bootimg_dir, target_files_zip_name)
-      repack_gki_lib.replace_target_files_zip_kernels(target_files_zip_path,
-                                                      kernel_out_dir,
-                                                      args.kernel_version)
-      shutil.copy(target_files_zip_path, args.out_dir)
-
-    # Copy otatools.zip from the ramdisk build, used for GKI signing.
-    shutil.copy(os.path.join(tmp_bootimg_dir, 'otatools.zip'), args.out_dir)
-
-    # Write prebuilt-info.txt using the prebuilt artifact build IDs.
-    data = {
-        'ramdisk-build-id': int(args.ramdisk_build_id),
-        'kernel-build-id': int(args.kernel_build_id),
-    }
-    with open(os.path.join(kernel_out_dir, 'prebuilt-info.txt'), 'w') as f:
-      json.dump(data, f, indent=4)
-
-
-if __name__ == '__main__':
-  main()
diff --git a/gki/repack_gki_lib.py b/gki/repack_gki_lib.py
deleted file mode 100644
index 9051a65..0000000
--- a/gki/repack_gki_lib.py
+++ /dev/null
@@ -1,164 +0,0 @@
-"""Helper library for repacking GKI boot images."""
-import os
-import shutil
-import subprocess
-import tempfile
-
-from treble.fetcher import fetcher_lib
-
-
-def fetch_bootimg(client, out_dir, build_id, kernel_version, target):
-  """Fetches boot.img artifacts from a given build ID."""
-  fetcher_lib.fetch_artifacts(
-      client=client,
-      build_id=build_id,
-      target=target,
-      pattern=r'(gsi_.*-img-.*\.zip|gsi_.*-target_files-.*\.zip|boot-debug-{version}.*\.img|boot-test-harness-{version}.*\.img|otatools.zip)'
-      .format(version=kernel_version),
-      out_dir=out_dir)
-
-
-def fetch_kernel(client, out_dir, build_id, kernel_target, kernel_debug_target):
-  """Fetches kernel artifacts from a given build ID."""
-  kernel_dir = os.path.join(out_dir, 'kernel')
-  kernel_debug_dir = os.path.join(out_dir, 'kernel_debug')
-  os.makedirs(kernel_dir)
-  os.makedirs(kernel_debug_dir)
-
-  fetcher_lib.fetch_artifacts(
-      client=client,
-      build_id=build_id,
-      target=kernel_target,
-      pattern=r'(Image|Image.lz4|System\.map|abi_symbollist|vmlinux)',
-      out_dir=kernel_dir)
-  fetcher_lib.fetch_artifacts(
-      client=client,
-      build_id=build_id,
-      target=kernel_debug_target,
-      pattern=r'(Image|Image.lz4|System\.map|abi-generated.xml|abi-full-generated.xml)',
-      out_dir=kernel_debug_dir)
-
-  print('Compressing kernels')
-
-  def compress_kernel(kernel_path):
-    zipped_kernel_path = os.path.join(os.path.dirname(kernel_path), 'Image.gz')
-    with open(zipped_kernel_path, 'wb') as zipped_kernel:
-      cmd = [
-          'gzip',
-          '-nc',
-          kernel_path,
-      ]
-      print(' '.join(cmd))
-      subprocess.check_call(cmd, stdout=zipped_kernel)
-
-  compress_kernel(os.path.join(kernel_dir, 'Image'))
-  compress_kernel(os.path.join(kernel_debug_dir, 'Image'))
-
-  return kernel_dir, kernel_debug_dir
-
-
-def _replace_kernel(bootimg_path, kernel_path):
-  """Unpacks a boot.img, replaces the kernel, then repacks."""
-  with tempfile.TemporaryDirectory() as unpack_dir:
-    print('Unpacking bootimg %s' % bootimg_path)
-    cmd = [
-        'out/host/linux-x86/bin/unpack_bootimg',
-        '--boot_img',
-        bootimg_path,
-        '--out',
-        unpack_dir,
-        '--format',
-        'mkbootimg',
-    ]
-    print(' '.join(cmd))
-    mkbootimg_args = subprocess.check_output(cmd).decode('utf-8').split(' ')
-    print('Copying kernel %s' % kernel_path)
-    shutil.copy(kernel_path, os.path.join(unpack_dir, 'kernel'))
-    print('Repacking with mkbootimg')
-    cmd = [
-        'out/host/linux-x86/bin/mkbootimg',
-        '--output',
-        bootimg_path,
-    ] + mkbootimg_args
-    print(' '.join(cmd))
-    subprocess.check_call(cmd)
-
-
-def repack_bootimgs(bootimg_dir, kernel_dir, kernel_debug_dir):
-  """Repacks all boot images in a given dir using the provided kernels."""
-  for bootimg_path in os.listdir(bootimg_dir):
-    bootimg_path = os.path.join(bootimg_dir, bootimg_path)
-    if not bootimg_path.endswith('.img'):
-      continue
-
-    kernel_name = 'Image'
-    if '-gz' in bootimg_path:
-      kernel_name = 'Image.gz'
-    elif '-lz4' in bootimg_path:
-      kernel_name = 'Image.lz4'
-
-    kernel_path = os.path.join(kernel_dir, kernel_name)
-    if bootimg_path.endswith('-allsyms.img'):
-      kernel_path = os.path.join(kernel_debug_dir, kernel_name)
-
-    _replace_kernel(bootimg_path, kernel_path)
-
-
-def repack_img_zip(img_zip_path, kernel_dir, kernel_debug_dir, kernel_version):
-  """Repacks boot images within an img.zip archive."""
-  with tempfile.TemporaryDirectory() as unzip_dir:
-    # TODO(b/209035444): 5.15 GSI boot.img is not yet available, so reuse 5.10 boot.img
-    # which should have an identical ramdisk.
-    if kernel_version == '5.15':
-      kernel_version = '5.10'
-    pattern = 'boot-{}*'.format(kernel_version)
-    print('Unzipping %s to repack bootimgs' % img_zip_path)
-    cmd = [
-        'unzip',
-        '-d',
-        unzip_dir,
-        img_zip_path,
-        pattern,
-    ]
-    print(' '.join(cmd))
-    subprocess.check_call(cmd)
-    repack_bootimgs(unzip_dir, kernel_dir, kernel_debug_dir)
-    cmd = [
-        'zip',
-        img_zip_path,
-        pattern,
-    ]
-    print(' '.join(cmd))
-    subprocess.check_call(cmd, cwd=unzip_dir)
-
-
-def replace_target_files_zip_kernels(target_files_zip_path, kernel_out_dir,
-                                     kernel_version):
-  """Replaces the BOOT/kernel-* kernels within a target_files.zip archive."""
-  with tempfile.TemporaryDirectory() as unzip_dir:
-    pattern = 'BOOT/kernel-{}*'.format(kernel_version)
-    print(
-        'Unzipping %s to replace kernels in preparation for signing' %
-        target_files_zip_path,)
-    cmd = [
-        'unzip',
-        '-d',
-        unzip_dir,
-        target_files_zip_path,
-        pattern,
-    ]
-    print(' '.join(cmd))
-    subprocess.check_call(cmd)
-    for kernel in os.listdir(kernel_out_dir):
-      if kernel.startswith('kernel-{}'.format(kernel_version)):
-        print('Copying %s' % kernel)
-        shutil.copy(
-            os.path.join(kernel_out_dir, kernel),
-            os.path.join(unzip_dir, 'BOOT'))
-    cmd = [
-        'zip',
-        target_files_zip_path,
-        pattern,
-    ]
-    print(' '.join(cmd))
-    subprocess.check_call(cmd, cwd=unzip_dir)
diff --git a/split/Android.bp b/split/Android.bp
index f35167f..331354b 100644
--- a/split/Android.bp
+++ b/split/Android.bp
@@ -87,6 +87,9 @@
         "xml_diff.py",
         "xml_diff_test.py",
     ],
+    libs: [
+        "py-mock",
+    ],
     test_config: "test.xml",
     test_suites: ["general-tests"],
 }
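The py-mock library added above supplies the third-party mock package that manifest_split_test.py imports below; its patch.object decorator behaves the same as unittest.mock's. A small usage sketch, with an illustrative command and canned return value:

import subprocess
import mock  # third-party "mock" package; mirrors the unittest.mock API

@mock.patch.object(subprocess, 'check_output', autospec=True)
def list_ninja_inputs(mock_check_output):
  # The patched check_output never runs ninja; it returns the canned bytes.
  mock_check_output.return_value = b'path/to/input1\npath/to/input2\n'
  return subprocess.check_output(['ninja', '-f', 'build.ninja', '-t', 'inputs', 'droid'])

print(list_ninja_inputs().decode().split())
# ['path/to/input1', 'path/to/input2']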
diff --git a/split/manifest_split.py b/split/manifest_split.py
index 5114f24..d5f9b95 100644
--- a/split/manifest_split.py
+++ b/split/manifest_split.py
@@ -121,12 +121,10 @@
       this project, for projects that should be added to the resulting manifest.
     path_mappings: A list of PathMappingConfigs to modify a path in the build
       sandbox to the path in the manifest.
-    ignore_paths: Set of paths to ignore when parsing module_info_file
   """
   remove_projects: Dict[str, str]
   add_projects: Dict[str, str]
   path_mappings: List[PathMappingConfig]
-  ignore_paths: Set[str]
 
   @classmethod
   def from_config_files(cls, config_files: List[str]):
@@ -141,8 +139,6 @@
     remove_projects: Dict[str, str] = {}
     add_projects: Dict[str, str] = {}
     path_mappings = []
-    """ Always ignore paths in out/ directory. """
-    ignore_paths = set(["out/"])
     for config_file in config_files:
       root = ET.parse(config_file).getroot()
 
@@ -159,10 +155,7 @@
           for child in root.findall("path_mapping")
       ])
 
-      ignore_paths.update(
-          {c.attrib["name"]: config_file for c in root.findall("ignore_path")})
-
-    return cls(remove_projects, add_projects, path_mappings, ignore_paths)
+    return cls(remove_projects, add_projects, path_mappings)
 
 
 def get_repo_projects(repo_list_file, manifest, path_mappings):
@@ -202,7 +195,7 @@
 class ModuleInfo:
   """Contains various mappings to/from module/project"""
 
-  def __init__(self, module_info_file, repo_projects, ignore_paths):
+  def __init__(self, module_info_file, repo_projects):
     """Initialize a module info instance.
 
     Builds various maps related to platform build system modules and how they
@@ -211,7 +204,6 @@
     Args:
       module_info_file: The path to a module-info.json file from a build.
       repo_projects: The output of the get_repo_projects function.
-      ignore_paths: Set of paths to ignore from module_info_file data
 
     Raises:
       ValueError: A module from module-info.json belongs to a path not
@@ -229,18 +221,14 @@
     with open(module_info_file) as module_info_file:
       module_info = json.load(module_info_file)
 
-    # Check that module contains a path and the path is not in set of
-    # ignore paths
     def module_has_valid_path(module):
-      paths = module.get("path")
-      if not paths:
-        return False
-      return all(not paths[0].startswith(p) for p in ignore_paths)
+      return ("path" in module_info[module] and module_info[module]["path"] and
+              not module_info[module]["path"][0].startswith("out/"))
 
     module_paths = {
         module: module_info[module]["path"][0]
         for module in module_info
-        if module_has_valid_path(module_info[module])
+        if module_has_valid_path(module)
     }
     module_project_paths = {
         module: scan_repo_projects(repo_projects, module_paths[module])
@@ -531,8 +519,7 @@
 
   # While we still have projects whose modules we haven't checked yet,
   if module_info_file:
-    module_info = ModuleInfo(module_info_file, repo_projects,
-                             config.ignore_paths)
+    module_info = ModuleInfo(module_info_file, repo_projects)
     checked_projects = set()
     projects_to_check = input_projects.difference(checked_projects)
     logger.info("Checking module-info dependencies for direct and adjacent modules...")
diff --git a/split/manifest_split_test.py b/split/manifest_split_test.py
index d9c6f76..546d3c1 100644
--- a/split/manifest_split_test.py
+++ b/split/manifest_split_test.py
@@ -14,12 +14,12 @@
 """Test manifest split."""
 
 import json
+import mock
 import os
 import re
 import subprocess
 import tempfile
 import unittest
-import unittest.mock
 import xml.etree.ElementTree as ET
 
 import manifest_split
@@ -121,9 +121,8 @@
           'system/project4': 'platform/project4',
           'vendor/google/project3': 'vendor/project3',
       }
-      ignore_paths = set(['out/'])
       module_info = manifest_split.ModuleInfo(module_info_file.name,
-                                              repo_projects, ignore_paths)
+                                              repo_projects)
       self.assertEqual(
           module_info.project_modules, {
               'platform/project1': set(['target1a', 'target1b']),
@@ -164,13 +163,11 @@
       }""")
       module_info_file.flush()
       repo_projects = {}
-      ignore_paths = set()
       with self.assertRaisesRegex(ValueError,
                                   'Unknown module path for module target1'):
-        manifest_split.ModuleInfo(module_info_file.name, repo_projects,
-                                  ignore_paths)
+        manifest_split.ModuleInfo(module_info_file.name, repo_projects)
 
-  @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
   def test_get_ninja_inputs(self, mock_check_output):
     mock_check_output.return_value = b"""
     path/to/input1
@@ -182,7 +179,7 @@
     inputs = manifest_split.get_ninja_inputs('unused', 'unused', ['droid'])
     self.assertEqual(inputs, {'path/to/input1', 'path/to/input2'})
 
-  @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
   def test_get_ninja_inputs_includes_test_mapping(self, mock_check_output):
     mock_check_output.return_value = b"""
     path/to/input1
@@ -195,7 +192,7 @@
     self.assertEqual(
         inputs, {'path/to/input1', 'path/to/input2', 'path/to/TEST_MAPPING'})
 
-  @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
   def test_get_kati_makefiles(self, mock_check_output):
     with tempfile.TemporaryDirectory() as temp_dir:
       os.chdir(temp_dir)
@@ -294,7 +291,7 @@
         ET.tostring(projects[0]).strip().decode(),
         '<project name="platform/project1" path="system/project1" />')
 
-  @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
   def test_create_split_manifest(self, mock_check_output):
     with tempfile.NamedTemporaryFile('w+t') as repo_list_file, \
       tempfile.NamedTemporaryFile('w+t') as manifest_file, \
@@ -447,9 +444,9 @@
         self.assertEqual(debug_data['vendor/project1']['kati_makefiles'][0],
                          product_makefile)
 
-  @unittest.mock.patch.object(manifest_split, 'get_ninja_inputs', autospec=True)
-  @unittest.mock.patch.object(manifest_split, 'get_kati_makefiles', autospec=True)
-  @unittest.mock.patch.object(manifest_split.ModuleInfo, '__init__', autospec=True)
+  @mock.patch.object(manifest_split, 'get_ninja_inputs', autospec=True)
+  @mock.patch.object(manifest_split, 'get_kati_makefiles', autospec=True)
+  @mock.patch.object(manifest_split.ModuleInfo, '__init__', autospec=True)
   def test_create_split_manifest_skip_kati_module_info(self, mock_init,
                                                        mock_get_kati_makefiles,
                                                        mock_get_ninja_inputs):
@@ -486,7 +483,7 @@
     mock_get_kati_makefiles.assert_not_called()
     mock_init.assert_not_called()
 
-  @unittest.mock.patch.object(subprocess, 'check_output', autospec=True)
+  @mock.patch.object(subprocess, 'check_output', autospec=True)
   def test_create_split_manifest_installed_prebuilt(self, mock_check_output):
 
     # The purpose of this test is to verify that create_split_manifests treats
diff --git a/vf/merge.sh b/vf/merge.sh
index 8e76241..20ceb6b 100755
--- a/vf/merge.sh
+++ b/vf/merge.sh
@@ -5,14 +5,14 @@
 
 set -e
 
-while getopts ":t:d:v:b:m:r:" option ; do
+while getopts ":t:d:v:b:m:" option ; do
   case "${option}" in
     t) TARGET=${OPTARG} ;;
     d) DIST_DIR=${OPTARG} ;;
     v) VENDOR_DIR=${OPTARG} ;;
     b) BUILD_ID=${OPTARG} ;;
+    # TODO(b/170638547) Remove the need for merge configs.
     m) MERGE_CONFIG_DIR=${OPTARG} ;;
-    r) HAS_RADIO_IMG=${OPTARG} ;;
     *) echo "Unexpected argument: -${OPTARG}" >&2 ;;
   esac
 done
@@ -33,8 +33,9 @@
   echo "error: -b build id argument not set"
   exit 1
 fi
-if [[ -z "${HAS_RADIO_IMG}" ]]; then
-  HAS_RADIO_IMG="true"
+if [[ -z "${MERGE_CONFIG_DIR}" ]]; then
+  echo "error: -m merge config dir argument not set"
+  exit 1
 fi
 
 # Move the system-only build artifacts to a separate folder
@@ -47,33 +48,20 @@
 source build/envsetup.sh
 lunch ${TARGET}-userdebug
 
-EXTRA_FLAGS=""
-if [[ "${MERGE_CONFIG_DIR}" ]]; then
-  EXTRA_FLAGS+=" --framework-item-list ${MERGE_CONFIG_DIR}/framework_item_list.txt \
-  --framework-misc-info-keys ${MERGE_CONFIG_DIR}/framework_misc_info_keys.txt \
-  --vendor-item-list ${MERGE_CONFIG_DIR}/vendor_item_list.txt"
-fi
 out/host/linux-x86/bin/merge_target_files \
   --framework-target-files ${SYSTEM_DIR}/${TARGET}-target_files*.zip \
   --vendor-target-files ${VENDOR_DIR}/*-target_files-*.zip \
+  --framework-item-list ${MERGE_CONFIG_DIR}/framework_item_list.txt \
+  --framework-misc-info-keys ${MERGE_CONFIG_DIR}/framework_misc_info_keys.txt \
+  --vendor-item-list ${MERGE_CONFIG_DIR}/vendor_item_list.txt \
   --allow-duplicate-apkapex-keys \
   --output-target-files ${DIST_DIR}/${TARGET}-target_files-${BUILD_ID}.zip \
   --output-img  ${DIST_DIR}/${TARGET}-img-${BUILD_ID}.zip \
-  --output-ota  ${DIST_DIR}/${TARGET}-ota-${BUILD_ID}.zip \
-  ${EXTRA_FLAGS}
+  --output-ota  ${DIST_DIR}/${TARGET}-ota-${BUILD_ID}.zip
 
 # Copy bootloader.img, radio.img, and android-info.txt, needed for flashing.
 cp ${VENDOR_DIR}/bootloader.img ${DIST_DIR}/bootloader.img
-# Copy radio.img unless arg is "false" (eg. Android TV targets)
-if [[ $HAS_RADIO_IMG = "true" ]]; then
-  cp ${VENDOR_DIR}/radio.img ${DIST_DIR}/radio.img
-fi
-
-# Copy vendor otatools.zip, needed by sign_target_files_apks
-if [[ -f "${VENDOR_DIR}/otatools.zip" ]]; then
-  cp ${VENDOR_DIR}/otatools.zip ${DIST_DIR}/otatools_vendor.zip
-fi
-
+cp ${VENDOR_DIR}/radio.img ${DIST_DIR}/radio.img
 unzip -j -d ${DIST_DIR} \
   ${VENDOR_DIR}/*-target_files-*.zip \
-  OTA/android-info.txt
\ No newline at end of file
+  OTA/android-info.txt