Merge empty history for sparse-11710750-L58100030003178929

Change-Id: I34ff8a778b0da414390c9826138c6721cc174f31
diff --git a/.gitignore b/.gitignore
index 68d603f..7da83c3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,7 +6,6 @@
 build.xml
 /host-tools/
 /secrets.py
-/.idea
 
 # https://github.com/github/gitignore/blob/master/Python.gitignore
 # Byte-compiled / optimized / DLL files
diff --git a/OWNERS b/OWNERS
index 1479e1b..207aac3 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,8 +1,7 @@
-blume@google.com
 danalbert@google.com
 enh@google.com
+jmgao@google.com
 rprichard@google.com
-zijunzhao@google.com
 
 # For questions rather than code review, send email to:
 #
diff --git a/README.md b/README.md
index 1532861..81fb8a5 100644
--- a/README.md
+++ b/README.md
@@ -9,10 +9,9 @@
 The NDK allows Android application developers to include native code in their
 Android application packages, compiled as JNI shared libraries.
 
-To navigate this site, use the navigation bar at the top of the page. For
-additional documentation, see the listing for the [docs directory]. If you're
-not sure where to start, the [Onboarding](docs/Onboarding.md) page will guide
-you.
+This page provides an overview of what is contained in the NDK. For
+information on building or testing the NDK, the roadmap, or other information,
+see the navigation bar at the top of this page, or the [docs directory].
 
 [docs directory]: docs/
 
@@ -30,3 +29,39 @@
 [Android Developer website]: https://developer.android.com/ndk/index.html
 [android-ndk Google Group]: http://groups.google.com/group/android-ndk
 [android-ndk-announce Google Group]: http://groups.google.com/group/android-ndk-announce
+
+## Components
+
+The NDK components can be loosely grouped into host toolchains, target
+prebuilts, build systems, and support libraries.
+
+For more information, see the [Build System Maintainers] guide.
+
+[Build System Maintainers]: docs/BuildSystemMaintainers.md
+
+### Build Systems
+
+While the NDK is primarily a toolchain for building Android code, the package
+also includes some build system support.
+
+First, `$NDK/build/core` contains ndk-build. This is the NDK's home grown build
+system. The entry point for this build system is `$NDK/build/ndk-build` (or
+`$NDK/build/ndk-build.cmd`).
+
+A CMake toolchain file is included at
+`$NDK/build/cmake/android.toolchain.cmake`. This is separate from CMake's own
+support for the NDK.
+
+`$NDK/build/tools/make_standalone_toolchain.py` is a tool which can create a
+redistributable toolchain that targets a single Android ABI and API level. As of
+NDK r19 it is unnecessary, as the installed toolchain may be invoked directly,
+but it remains for compatibility.
+
+Since the Android Gradle plugin is responsible for both Java and native code,
+it is not included as part of the NDK.
+
+### Support Libraries
+
+`sources/android` and `sources/third_party` contain modules that can be used in
+apps (gtest, cpufeatures, native\_app\_glue, etc) via `$(call
+import-module,$MODULE)` in ndk-build. CMake modules are not yet available.
diff --git a/bootstrap/__init__.py b/bootstrap/__init__.py
new file mode 100644
index 0000000..2294a7e
--- /dev/null
+++ b/bootstrap/__init__.py
@@ -0,0 +1,355 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Tools for bootstrapping Python 3."""
+import datetime
+import logging
+import multiprocessing
+import os
+import pipes
+import shutil
+import subprocess
+import sys
+import timeit
+
+
+THIS_DIR = os.path.realpath(os.path.dirname(__file__))
+
+
+def logger():
+    """Returns the module level logger."""
+    return logging.getLogger(__name__)
+
+
+def android_path(*args):
+    """Returns the absolute path rooted within the top level source tree."""
+    return os.path.normpath(os.path.join(THIS_DIR, '../..', *args))
+
+
+PYTHON_SOURCE = android_path('external/python/cpython3')
+
+
+def _get_dir_from_env(default, env_var):
+    """Returns the path to a directory specified by the environment.
+
+    If the environment variable is not set, the default will be used. The
+    directory is created if it does not exist.
+
+    Args:
+        default: The path used if the environment variable is not set.
+        env_var: The environment variable that contains the path, if any.
+
+    Returns:
+        The absolute path to the directory.
+    """
+    path = os.path.realpath(os.getenv(env_var, default))
+    if not os.path.isdir(path):
+        os.makedirs(path)
+    return path
+
+
+def get_out_dir():
+    """Returns the out directory."""
+    return _get_dir_from_env(android_path('out'), 'OUT_DIR')
+
+
+def get_dist_dir():
+    """Returns the distribution directory.
+
+    The contents of the distribution directory are archived on the build
+    servers. Suitable for build logs and final artifacts.
+    """
+    return _get_dir_from_env(os.path.join(get_out_dir(), 'dist'), 'DIST_DIR')
+
+
+def path_in_out(dirname):
+    """Returns a path within the out directory.
+
+    Args:
+        dirname: Name of the directory.
+
+    Returns:
+        Absolute path within the out directory.
+    """
+    return os.path.join(get_out_dir(), dirname)
+
+
+def log_failure_and_exit(output):
+    """Logs the bootstrapping failure and exits.
+
+    Args:
+        output: Output of the failed command.
+    """
+    log_dir = os.path.join(get_dist_dir(), 'logs')
+    if not os.path.exists(log_dir):
+        os.makedirs(log_dir)
+    log_path = os.path.join(log_dir, 'build_error.log')
+    with open(log_path, 'w') as error_log:
+        error_log.write('Bootstrapping failed!\n')
+        error_log.write(output)
+
+    logger().error(output)
+    sys.exit('Bootstrapping failed!')
+
+
+def check_output(cmd):
+    """Logged version of subprocess.check_output.
+
+    stderr is automatically forwarded to stdout.
+
+    Args:
+        cmd: argv style argument list for the process to be run.
+
+    Returns:
+        Output
+    """
+    logger().debug('Running: %s', ' '.join([pipes.quote(a) for a in cmd]))
+    return subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+
+
+def build_python(install_dir, build_dir):
+    """Builds and installs Python to the given directory.
+
+    Args:
+        install_dir: Install path for the built Python distribution.
+        build_dir: Directory to use for building Python.
+    """
+    logger().info('Bootstrapping Python...')
+
+    if os.path.exists(build_dir):
+        shutil.rmtree(build_dir)
+    os.makedirs(build_dir)
+
+    old_cwd = os.getcwd()
+    try:
+        os.chdir(build_dir)
+
+        check_output([
+            os.path.join(PYTHON_SOURCE, 'configure'),
+            '--prefix=' + install_dir,
+
+            # This enables PGO and requires running all the Python tests to
+            # generate those profiles. If we end up repackaging this Python to
+            # ship in the NDK we should do this, but for now it makes
+            # bootstrapping take a lot longer and we don't need the performance
+            # since our build time is dominated by non-Python code anyway.
+            # '--enable-optimizations',
+        ])
+
+        check_output([
+            'make',
+            '-j',
+            str(multiprocessing.cpu_count()),
+            'install',
+        ])
+    except subprocess.CalledProcessError as ex:
+        log_failure_and_exit(ex.output)
+    finally:
+        os.chdir(old_cwd)
+
+
+def install_requirements(install_dir, requirements):
+    """Installs required Python packages using pip.
+
+    Args:
+        install_dir: Directory in which Python 3 is installed.
+        requirements: Path to requirements.txt file to be passed to pip.
+    """
+    logger().info('Installing additional requirements...')
+    try:
+        check_output([
+            os.path.join(install_dir, 'bin/pip3'),
+            'install',
+            '-r',
+            requirements,
+        ])
+    except subprocess.CalledProcessError as ex:
+        log_failure_and_exit(ex.output)
+
+
+class Timer(object):  # pylint: disable=useless-object-inheritance
+    """Execution timer.
+
+    Can be used explicitly with stop/start, but preferably is used as a context
+    manager:
+
+    >>> timer = Timer()
+    >>> with timer:
+    >>>     do_something()
+    >>> print('do_something() took {}'.format(timer.duration))
+    """
+    def __init__(self):
+        self.start_time = None
+        self.end_time = None
+        self.duration = None
+
+    def start(self):
+        """Start the timer."""
+        self.start_time = timeit.default_timer()
+
+    def finish(self):
+        """Stop the timer."""
+        self.end_time = timeit.default_timer()
+
+        # Not interested in partial seconds at this scale.
+        seconds = int(self.end_time - self.start_time)
+        self.duration = datetime.timedelta(seconds=seconds)
+
+    def __enter__(self):
+        self.start()
+
+    def __exit__(self, _exc_type, _exc_value, _traceback):
+        self.finish()
+
+
+def read_requirements(requirements):
+    """Returns the contents of a requirements file or None.
+
+    Args:
+        requirements: Path to a requirements.txt file that may or may not
+        exist, or None.
+
+    Returns:
+        The contents of the requirements file if it exists, or None if the
+        requirements file is None or does not exist.
+    """
+
+    if requirements is None:
+        return None
+
+    if not os.path.exists(requirements):
+        return None
+
+    with open(requirements) as requirements_file:
+        return requirements_file.read()
+
+
+class BootstrapManifest(object):  # pylint: disable=useless-object-inheritance
+    """Describes the contents of the bootstrapped directory."""
+
+    SOURCE_MANIFEST_PATH = os.path.join(PYTHON_SOURCE, 'README.rst')
+
+    def __init__(self, install_path, requirements):
+        self.install_path = install_path
+        self.manifest_file = os.path.join(self.install_path, '.bootstrapped')
+
+        self.requested_requirements_path = requirements
+        self.bootstrapped_requirements_path = os.path.join(
+            self.install_path, 'requirements.txt')
+
+        self.requested_requirements = read_requirements(
+            self.requested_requirements_path)
+        self.bootstrapped_requirements = read_requirements(
+            self.bootstrapped_requirements_path)
+
+    def is_up_to_date(self):
+        """Returns True if the bootstrap install is up to date."""
+        if not os.path.exists(self.manifest_file):
+            return False
+        if not self.versions_match():
+            logger().info('Bootstrap out of date: Python has changed.')
+            return False
+        if self.requested_requirements != self.bootstrapped_requirements:
+            logger().info('Bootstrap out of date: requirements have changed.')
+            return False
+        return True
+
+    def versions_match(self):
+        """Returns True if the bootstrap has an up to date Python."""
+        # Ideally this would be a check of the git revision of the Python
+        # source, but we can't assume that information is available on the
+        # build servers. For now, assume the README.rst will change for any
+        # update. This should be fine since updates should include a change to
+        # the version number.
+
+        # This function should not be called if this file does not exist.
+        assert os.path.exists(self.manifest_file)
+
+        with open(self.SOURCE_MANIFEST_PATH) as readme_rst:
+            source_manifest = readme_rst.read()
+        with open(self.manifest_file) as manifest_file:
+            bootstrapped_manifest = manifest_file.read()
+
+        return source_manifest == bootstrapped_manifest
+
+    def save(self):
+        """Saves the bootstrap manifest to disk."""
+        self.save_python_version()
+        self.save_requirements()
+
+    def save_python_version(self):
+        shutil.copy2(self.SOURCE_MANIFEST_PATH, self.manifest_file)
+
+    def save_requirements(self):
+        if self.requested_requirements is not None:
+            shutil.copy2(self.requested_requirements_path,
+                         self.bootstrapped_requirements_path)
+        # An existing bootstrap directory is removed if it needed to be
+        # updated, so no need to remove an existing requirements file in the
+        # case where a requirements file was used but no longer is.
+
+
+def do_bootstrap(install_dir, requirements):
+    """Helper function for bootstrapping.
+
+    Builds and installs Python 3 if necessary, but does not modify the
+    environment.
+
+    Args:
+        install_dir: Directory in which to install Python 3.
+        requirements: An optional path to a requirements.txt file. This will be
+            passed to pip to install additional dependencies. If None, no
+            additional packages will be installed.
+
+    Returns:
+        None. Python is installed to install_dir; stale installs are purged.
+    """
+    build_dir = path_in_out('bootstrap-build')
+
+    bootstrap_manifest = BootstrapManifest(install_dir, requirements)
+    if bootstrap_manifest.is_up_to_date():
+        return
+
+    # If the bootstrap exists but is not up to date, purge it to ensure no
+    # stale files remain.
+    if os.path.exists(install_dir):
+        shutil.rmtree(install_dir)
+
+    timer = Timer()
+    with timer:
+        build_python(install_dir, build_dir)
+        if requirements is not None:
+            install_requirements(install_dir, requirements)
+    logger().info('Bootstrapping completed in %s', timer.duration)
+
+    bootstrap_manifest.save()
+
+
+def bootstrap(requirements=None):
+    """Creates a bootstrap Python 3 environment.
+
+    Builds and installs Python 3 for use on the current host. After execution,
+    the directory containing the python3 binary will be the first element in
+    the PATH.
+
+    Args:
+        requirements: An optional path to a requirements.txt file. This will be
+            passed to pip to install additional dependencies. If None, no
+            additional packages will be installed.
+    """
+    install_dir = path_in_out('bootstrap')
+    do_bootstrap(install_dir, requirements)
+    bootstrap_bin = os.path.join(install_dir, 'bin')
+    os.environ['PATH'] = os.pathsep.join([bootstrap_bin, os.environ['PATH']])
diff --git a/build/cmake/adjust_api_level.cmake b/build/cmake/adjust_api_level.cmake
deleted file mode 100644
index 51e31ab..0000000
--- a/build/cmake/adjust_api_level.cmake
+++ /dev/null
@@ -1,64 +0,0 @@
-include(${CMAKE_ANDROID_NDK}/build/cmake/abis.cmake)
-include(${CMAKE_ANDROID_NDK}/build/cmake/platforms.cmake)
-
-function(adjust_api_level api_level result_name)
-  # If no platform version was chosen by the user, default to the minimum
-  # version supported by this NDK.
-  if(NOT api_level)
-    message(STATUS
-      "ANDROID_PLATFORM not set. Defaulting to minimum supported version "
-      "${NDK_MIN_PLATFORM_LEVEL}.")
-
-    set(api_level "android-${NDK_MIN_PLATFORM_LEVEL}")
-  endif()
-
-  if(api_level STREQUAL "latest")
-    message(STATUS
-      "Using latest available ANDROID_PLATFORM: ${NDK_MAX_PLATFORM_LEVEL}.")
-    set(api_level "android-${NDK_MAX_PLATFORM_LEVEL}")
-  endif()
-
-  string(REPLACE "android-" "" result ${api_level})
-
-  # Aliases defined by meta/platforms.json include codename aliases for platform
-  # API levels as well as cover any gaps in platforms that may not have had NDK
-  # APIs.
-  if(NOT "${NDK_PLATFORM_ALIAS_${result}}" STREQUAL "")
-    message(STATUS
-      "${api_level} is an alias for ${NDK_PLATFORM_ALIAS_${result}}. Adjusting "
-      "ANDROID_PLATFORM to match.")
-    set(api_level "${NDK_PLATFORM_ALIAS_${result}}")
-    string(REPLACE "android-" "" result ${api_level})
-  endif()
-
-  # Pull up to the minimum supported version if an old API level was requested.
-  if(result LESS NDK_MIN_PLATFORM_LEVEL)
-    message(STATUS
-      "${api_level} is unsupported. Using minimum supported version "
-      "${NDK_MIN_PLATFORM_LEVEL}.")
-    set(api_level "android-${NDK_MIN_PLATFORM_LEVEL}")
-    string(REPLACE "android-" "" result ${api_level})
-  endif()
-
-  # Pull up any ABI-specific minimum API levels.
-  set(min_for_abi ${NDK_ABI_${ANDROID_ABI}_MIN_OS_VERSION})
-
-  if(result LESS min_for_abi)
-    message(STATUS
-      "android-${result} is not supported for ${ANDROID_ABI}. Using minimum "
-      "supported ${ANDROID_ABI} version ${min_for_abi}.")
-    set(api_level android-${min_for_abi})
-    set(result ${min_for_abi})
-  endif()
-
-  # ANDROID_PLATFORM beyond the maximum is an error. The correct way to specify
-  # the latest version is ANDROID_PLATFORM=latest.
-  if(result GREATER NDK_MAX_PLATFORM_LEVEL)
-    message(SEND_ERROR
-      "${api_level} is above the maximum supported version "
-      "${NDK_MAX_PLATFORM_LEVEL}. Choose a supported API level or set "
-      "ANDROID_PLATFORM to \"latest\".")
-  endif()
-
-  set(${result_name} ${result} PARENT_SCOPE)
-endfunction()
diff --git a/build/cmake/android-legacy.toolchain.cmake b/build/cmake/android-legacy.toolchain.cmake
deleted file mode 100644
index 610024b..0000000
--- a/build/cmake/android-legacy.toolchain.cmake
+++ /dev/null
@@ -1,752 +0,0 @@
-# Copyright (C) 2016 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Configurable variables.
-# Modeled after the ndk-build system.
-# For any variables defined in:
-#         https://developer.android.com/ndk/guides/android_mk.html
-#         https://developer.android.com/ndk/guides/application_mk.html
-# if it makes sense for CMake, then replace LOCAL, APP, or NDK with ANDROID, and
-# we have that variable below.
-#
-# ANDROID_TOOLCHAIN
-# ANDROID_ABI
-# ANDROID_PLATFORM
-# ANDROID_STL
-# ANDROID_PIE
-# ANDROID_CPP_FEATURES
-# ANDROID_ALLOW_UNDEFINED_SYMBOLS
-# ANDROID_ARM_MODE
-# ANDROID_DISABLE_FORMAT_STRING_CHECKS
-# ANDROID_CCACHE
-# ANDROID_SANITIZE
-
-cmake_minimum_required(VERSION 3.6.0)
-
-# Inhibit all of CMake's own NDK handling code.
-set(CMAKE_SYSTEM_VERSION 1)
-
-# Android NDK
-get_filename_component(ANDROID_NDK_EXPECTED_PATH
-    "${CMAKE_CURRENT_LIST_DIR}/../.." ABSOLUTE)
-if(NOT ANDROID_NDK)
-  set(ANDROID_NDK "${ANDROID_NDK_EXPECTED_PATH}")
-else()
-  # Allow the user to specify their own NDK path, but emit a warning. This is an
-  # uncommon use case, but helpful if users want to use a bleeding edge
-  # toolchain file with a stable NDK.
-  # https://github.com/android-ndk/ndk/issues/473
-  get_filename_component(ANDROID_NDK "${ANDROID_NDK}" ABSOLUTE)
-  if(NOT "${ANDROID_NDK}" STREQUAL "${ANDROID_NDK_EXPECTED_PATH}")
-    message(WARNING "Using custom NDK path (ANDROID_NDK is set): ${ANDROID_NDK}")
-  endif()
-endif()
-unset(ANDROID_NDK_EXPECTED_PATH)
-file(TO_CMAKE_PATH "${ANDROID_NDK}" ANDROID_NDK)
-
-# Android NDK revision
-# Possible formats:
-# * r16, build 1234: 16.0.1234
-# * r16b, build 1234: 16.1.1234
-# * r16 beta 1, build 1234: 16.0.1234-beta1
-#
-# Canary builds are not specially marked.
-file(READ "${ANDROID_NDK}/source.properties" ANDROID_NDK_SOURCE_PROPERTIES)
-
-set(ANDROID_NDK_REVISION_REGEX
-  "^Pkg\\.Desc = Android NDK\nPkg\\.Revision = ([0-9]+)\\.([0-9]+)\\.([0-9]+)(-beta([0-9]+))?")
-if(NOT ANDROID_NDK_SOURCE_PROPERTIES MATCHES "${ANDROID_NDK_REVISION_REGEX}")
-  message(SEND_ERROR "Failed to parse Android NDK revision: ${ANDROID_NDK}/source.properties.\n${ANDROID_NDK_SOURCE_PROPERTIES}")
-endif()
-
-set(ANDROID_NDK_MAJOR "${CMAKE_MATCH_1}")
-set(ANDROID_NDK_MINOR "${CMAKE_MATCH_2}")
-set(ANDROID_NDK_BUILD "${CMAKE_MATCH_3}")
-set(ANDROID_NDK_BETA "${CMAKE_MATCH_5}")
-if(ANDROID_NDK_BETA STREQUAL "")
-  set(ANDROID_NDK_BETA "0")
-endif()
-set(ANDROID_NDK_REVISION
-  "${ANDROID_NDK_MAJOR}.${ANDROID_NDK_MINOR}.${ANDROID_NDK_BUILD}${CMAKE_MATCH_4}")
-
-# Touch toolchain variable to suppress "unused variable" warning.
-# This happens if CMake is invoked with the same command line the second time.
-if(CMAKE_TOOLCHAIN_FILE)
-endif()
-
-# Compatibility for configurable variables.
-# Compatible with configurable variables from the other toolchain file:
-#         https://github.com/taka-no-me/android-cmake
-# TODO: We should consider dropping compatibility to simplify things once most
-# of our users have migrated to our standard set of configurable variables.
-if(ANDROID_TOOLCHAIN_NAME AND NOT ANDROID_TOOLCHAIN)
-  if(ANDROID_TOOLCHAIN_NAME MATCHES "-clang([0-9].[0-9])?$")
-    set(ANDROID_TOOLCHAIN clang)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "-[0-9].[0-9]$")
-    set(ANDROID_TOOLCHAIN gcc)
-  endif()
-endif()
-if(ANDROID_ABI STREQUAL "armeabi-v7a with NEON")
-  set(ANDROID_ABI armeabi-v7a)
-elseif(ANDROID_TOOLCHAIN_NAME AND NOT ANDROID_ABI)
-  if(ANDROID_TOOLCHAIN_NAME MATCHES "^arm-linux-androideabi-")
-    set(ANDROID_ABI armeabi-v7a)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^aarch64-linux-android-")
-    set(ANDROID_ABI arm64-v8a)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^x86-")
-    set(ANDROID_ABI x86)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^x86_64-")
-    set(ANDROID_ABI x86_64)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^mipsel-linux-android-")
-    set(ANDROID_ABI mips)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^mips64el-linux-android-")
-    set(ANDROID_ABI mips64)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^riscv64-")
-    set(ANDROID_ABI riscv64)
-  endif()
-endif()
-if(ANDROID_NATIVE_API_LEVEL AND NOT ANDROID_PLATFORM)
-  if(ANDROID_NATIVE_API_LEVEL MATCHES "^android-[0-9]+$")
-    set(ANDROID_PLATFORM ${ANDROID_NATIVE_API_LEVEL})
-  elseif(ANDROID_NATIVE_API_LEVEL MATCHES "^[0-9]+$")
-    set(ANDROID_PLATFORM android-${ANDROID_NATIVE_API_LEVEL})
-  endif()
-endif()
-if(DEFINED ANDROID_APP_PIE AND NOT DEFINED ANDROID_PIE)
-  set(ANDROID_PIE "${ANDROID_APP_PIE}")
-endif()
-if(ANDROID_STL_FORCE_FEATURES AND NOT DEFINED ANDROID_CPP_FEATURES)
-  set(ANDROID_CPP_FEATURES "rtti exceptions")
-endif()
-if(DEFINED ANDROID_NO_UNDEFINED AND NOT DEFINED ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  if(ANDROID_NO_UNDEFINED)
-    set(ANDROID_ALLOW_UNDEFINED_SYMBOLS FALSE)
-  else()
-    set(ANDROID_ALLOW_UNDEFINED_SYMBOLS TRUE)
-  endif()
-endif()
-if(DEFINED ANDROID_SO_UNDEFINED AND NOT DEFINED ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  set(ANDROID_ALLOW_UNDEFINED_SYMBOLS "${ANDROID_SO_UNDEFINED}")
-endif()
-if(DEFINED ANDROID_FORCE_ARM_BUILD AND NOT ANDROID_ARM_MODE)
-  if(ANDROID_FORCE_ARM_BUILD)
-    set(ANDROID_ARM_MODE arm)
-  else()
-    set(ANDROID_ARM_MODE thumb)
-  endif()
-endif()
-if(NDK_CCACHE AND NOT ANDROID_CCACHE)
-  set(ANDROID_CCACHE "${NDK_CCACHE}")
-endif()
-
-# Default values for configurable variables.
-if(NOT ANDROID_TOOLCHAIN)
-  set(ANDROID_TOOLCHAIN clang)
-elseif(ANDROID_TOOLCHAIN STREQUAL gcc)
-  message(FATAL_ERROR "GCC is no longer supported. See "
-  "https://android.googlesource.com/platform/ndk/+/master/docs/ClangMigration.md.")
-endif()
-if(NOT ANDROID_ABI)
-  set(ANDROID_ABI armeabi-v7a)
-endif()
-
-if(ANDROID_ABI STREQUAL armeabi)
-  message(FATAL_ERROR "armeabi is no longer supported. Use armeabi-v7a.")
-elseif(ANDROID_ABI MATCHES "^(mips|mips64)$")
-  message(FATAL_ERROR "MIPS and MIPS64 are no longer supported.")
-endif()
-
-if(DEFINED ANDROID_ARM_NEON AND NOT ANDROID_ARM_NEON)
-  message(FATAL_ERROR "Disabling Neon is no longer supported")
-endif()
-
-if(ANDROID_ABI STREQUAL armeabi-v7a)
-  set(ANDROID_ARM_NEON TRUE)
-endif()
-
-include(${ANDROID_NDK}/build/cmake/abis.cmake)
-include(${ANDROID_NDK}/build/cmake/platforms.cmake)
-
-# If no platform version was chosen by the user, default to the minimum version
-# supported by this NDK.
-if(NOT ANDROID_PLATFORM)
-  message(STATUS "\
-ANDROID_PLATFORM not set. Defaulting to minimum supported version
-${NDK_MIN_PLATFORM_LEVEL}.")
-
-  set(ANDROID_PLATFORM "android-${NDK_MIN_PLATFORM_LEVEL}")
-endif()
-
-if(ANDROID_PLATFORM STREQUAL "latest")
-  message(STATUS
-    "Using latest available ANDROID_PLATFORM: ${NDK_MAX_PLATFORM_LEVEL}.")
-  set(ANDROID_PLATFORM "android-${NDK_MAX_PLATFORM_LEVEL}")
-  string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
-endif()
-
-string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
-
-# Aliases defined by meta/platforms.json include codename aliases for platform
-# API levels as well as cover any gaps in platforms that may not have had NDK
-# APIs.
-if(NOT "${NDK_PLATFORM_ALIAS_${ANDROID_PLATFORM_LEVEL}}" STREQUAL "")
-  message(STATUS "\
-${ANDROID_PLATFORM} is an alias for \
-${NDK_PLATFORM_ALIAS_${ANDROID_PLATFORM_LEVEL}}. Adjusting ANDROID_PLATFORM to \
-match.")
-  set(ANDROID_PLATFORM "${NDK_PLATFORM_ALIAS_${ANDROID_PLATFORM_LEVEL}}")
-  string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
-endif()
-
-# Pull up to the minimum supported version if an old API level was requested.
-if(ANDROID_PLATFORM_LEVEL LESS NDK_MIN_PLATFORM_LEVEL)
-  message(STATUS "\
-${ANDROID_PLATFORM} is unsupported. Using minimum supported version \
-${NDK_MIN_PLATFORM_LEVEL}.")
-  set(ANDROID_PLATFORM "android-${NDK_MIN_PLATFORM_LEVEL}")
-  string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
-endif()
-
-# Pull up any ABI-specific minimum API levels.
-set(min_for_abi ${NDK_ABI_${ANDROID_ABI}_MIN_OS_VERSION})
-
-if(ANDROID_PLATFORM_LEVEL LESS min_for_abi)
-  message(STATUS
-    "${ANDROID_PLATFORM} is not supported for ${ANDROID_ABI}. Using minimum "
-    "supported ${ANDROID_ABI} version ${min_for_abi}.")
-  set(ANDROID_PLATFORM android-${min_for_abi})
-  set(ANDROID_PLATFORM_LEVEL ${min_for_abi})
-endif()
-
-# ANDROID_PLATFORM beyond the maximum is an error. The correct way to specify
-# the latest version is ANDROID_PLATFORM=latest.
-if(ANDROID_PLATFORM_LEVEL GREATER NDK_MAX_PLATFORM_LEVEL)
-  message(SEND_ERROR "\
-${ANDROID_PLATFORM} is above the maximum supported version \
-${NDK_MAX_PLATFORM_LEVEL}. Choose a supported API level or set \
-ANDROID_PLATFORM to \"latest\".")
-endif()
-
-if(NOT ANDROID_STL)
-  set(ANDROID_STL c++_static)
-endif()
-
-if("${ANDROID_STL}" STREQUAL "gnustl_shared" OR
-    "${ANDROID_STL}" STREQUAL "gnustl_static" OR
-    "${ANDROID_STL}" STREQUAL "stlport_shared" OR
-    "${ANDROID_STL}" STREQUAL "stlport_static")
-  message(FATAL_ERROR "\
-${ANDROID_STL} is no longer supported. Please switch to either c++_shared or \
-c++_static. See https://developer.android.com/ndk/guides/cpp-support.html \
-for more information.")
-endif()
-
-if("hwaddress" IN_LIST ANDROID_SANITIZE AND "${CMAKE_ANDROID_STL_TYPE}" STREQUAL "c++_static")
-  message(FATAL_ERROR "\
-  hwaddress does not support c++_static. Use system or c++_shared.")
-endif()
-
-set(ANDROID_PIE TRUE)
-if(NOT ANDROID_ARM_MODE)
-  set(ANDROID_ARM_MODE thumb)
-endif()
-
-# Export configurable variables for the try_compile() command.
-set(CMAKE_TRY_COMPILE_PLATFORM_VARIABLES
-  ANDROID_ABI
-  ANDROID_ALLOW_UNDEFINED_SYMBOLS
-  ANDROID_ARM_MODE
-  ANDROID_ARM_NEON
-  ANDROID_CCACHE
-  ANDROID_CPP_FEATURES
-  ANDROID_DISABLE_FORMAT_STRING_CHECKS
-  ANDROID_PIE
-  ANDROID_PLATFORM
-  ANDROID_STL
-  ANDROID_TOOLCHAIN
-  ANDROID_USE_LEGACY_TOOLCHAIN_FILE
-)
-
-# Standard cross-compiling stuff.
-set(ANDROID TRUE)
-set(CMAKE_SYSTEM_NAME Android)
-
-# https://github.com/android-ndk/ndk/issues/890
-#
-# ONLY doesn't do anything when CMAKE_FIND_ROOT_PATH is empty. Without this,
-# CMake will wrongly search host sysroots for headers/libraries. The actual path
-# used here is fairly meaningless since CMake doesn't handle the NDK sysroot
-# layout (per-arch and per-verion subdirectories for libraries), so find_library
-# is handled separately by CMAKE_SYSTEM_LIBRARY_PATH.
-list(APPEND CMAKE_FIND_ROOT_PATH "${ANDROID_NDK}")
-
-# Allow users to override these values in case they want more strict behaviors.
-# For example, they may want to prevent the NDK's libz from being picked up so
-# they can use their own.
-# https://github.com/android-ndk/ndk/issues/517
-if(NOT CMAKE_FIND_ROOT_PATH_MODE_PROGRAM)
-  set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-endif()
-
-if(NOT CMAKE_FIND_ROOT_PATH_MODE_LIBRARY)
-  set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-endif()
-
-if(NOT CMAKE_FIND_ROOT_PATH_MODE_INCLUDE)
-  set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
-endif()
-
-if(NOT CMAKE_FIND_ROOT_PATH_MODE_PACKAGE)
-  set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
-endif()
-
-# ABI.
-set(CMAKE_ANDROID_ARCH_ABI ${ANDROID_ABI})
-if(ANDROID_ABI STREQUAL armeabi-v7a)
-  set(ANDROID_SYSROOT_ABI arm)
-  set(ANDROID_TOOLCHAIN_NAME arm-linux-androideabi)
-  set(CMAKE_SYSTEM_PROCESSOR armv7-a)
-  set(ANDROID_LLVM_TRIPLE armv7-none-linux-androideabi)
-elseif(ANDROID_ABI STREQUAL arm64-v8a)
-  set(ANDROID_SYSROOT_ABI arm64)
-  set(CMAKE_SYSTEM_PROCESSOR aarch64)
-  set(ANDROID_TOOLCHAIN_NAME aarch64-linux-android)
-  set(ANDROID_LLVM_TRIPLE aarch64-none-linux-android)
-elseif(ANDROID_ABI STREQUAL x86)
-  set(ANDROID_SYSROOT_ABI x86)
-  set(CMAKE_SYSTEM_PROCESSOR i686)
-  set(ANDROID_TOOLCHAIN_NAME i686-linux-android)
-  set(ANDROID_LLVM_TRIPLE i686-none-linux-android)
-elseif(ANDROID_ABI STREQUAL x86_64)
-  set(ANDROID_SYSROOT_ABI x86_64)
-  set(CMAKE_SYSTEM_PROCESSOR x86_64)
-  set(ANDROID_TOOLCHAIN_NAME x86_64-linux-android)
-  set(ANDROID_LLVM_TRIPLE x86_64-none-linux-android)
-elseif(ANDROID_ABI STREQUAL riscv64)
-  set(ANDROID_SYSROOT_ABI riscv64)
-  set(CMAKE_SYSTEM_PROCESSOR riscv64)
-  set(ANDROID_TOOLCHAIN_NAME riscv64-linux-android)
-  set(ANDROID_LLVM_TRIPLE riscv64-none-linux-android)
-else()
-  message(FATAL_ERROR "Invalid Android ABI: ${ANDROID_ABI}.")
-endif()
-
-set(ANDROID_LLVM_TRIPLE "${ANDROID_LLVM_TRIPLE}${ANDROID_PLATFORM_LEVEL}")
-
-set(ANDROID_COMPILER_FLAGS)
-set(ANDROID_COMPILER_FLAGS_CXX)
-set(ANDROID_COMPILER_FLAGS_DEBUG)
-set(ANDROID_COMPILER_FLAGS_RELEASE)
-set(ANDROID_LINKER_FLAGS)
-set(ANDROID_LINKER_FLAGS_EXE)
-set(ANDROID_LINKER_FLAGS_RELEASE)
-set(ANDROID_LINKER_FLAGS_RELWITHDEBINFO)
-set(ANDROID_LINKER_FLAGS_MINSIZEREL)
-
-# STL.
-set(ANDROID_CXX_STANDARD_LIBRARIES)
-if(ANDROID_STL STREQUAL system)
-  list(APPEND ANDROID_COMPILER_FLAGS_CXX "-stdlib=libstdc++")
-  if(NOT "x${ANDROID_CPP_FEATURES}" STREQUAL "x")
-    list(APPEND ANDROID_CXX_STANDARD_LIBRARIES "-lc++abi")
-  endif()
-elseif(ANDROID_STL STREQUAL c++_static)
-  list(APPEND ANDROID_LINKER_FLAGS "-static-libstdc++")
-elseif(ANDROID_STL STREQUAL c++_shared)
-elseif(ANDROID_STL STREQUAL none)
-  list(APPEND ANDROID_COMPILER_FLAGS_CXX "-nostdinc++")
-  list(APPEND ANDROID_LINKER_FLAGS "-nostdlib++")
-else()
-  message(FATAL_ERROR "Invalid STL: ${ANDROID_STL}.")
-endif()
-
-if(CMAKE_HOST_SYSTEM_NAME STREQUAL Linux)
-  set(ANDROID_HOST_TAG linux-x86_64)
-elseif(CMAKE_HOST_SYSTEM_NAME STREQUAL Darwin)
-  set(ANDROID_HOST_TAG darwin-x86_64)
-elseif(CMAKE_HOST_SYSTEM_NAME STREQUAL Windows)
-  set(ANDROID_HOST_TAG windows-x86_64)
-endif()
-
-if(CMAKE_HOST_SYSTEM_NAME STREQUAL Windows)
-  set(ANDROID_TOOLCHAIN_SUFFIX .exe)
-endif()
-
-# Toolchain.
-set(ANDROID_TOOLCHAIN_ROOT
-  "${ANDROID_NDK}/toolchains/llvm/prebuilt/${ANDROID_HOST_TAG}")
-
-list(APPEND CMAKE_PREFIX_PATH "${ANDROID_TOOLCHAIN_ROOT}")
-
-# NB: This variable causes CMake to automatically pass --sysroot to the
-# toolchain. Studio currently relies on this to recognize Android builds. If
-# this variable is removed, ensure that flag is still passed.
-# TODO: Teach Studio to recognize Android builds based on --target.
-set(CMAKE_SYSROOT "${ANDROID_TOOLCHAIN_ROOT}/sysroot")
-
-# Allows CMake to find headers in the architecture-specific include directories.
-set(CMAKE_LIBRARY_ARCHITECTURE "${ANDROID_TOOLCHAIN_NAME}")
-
-# In addition to <root>/<prefix>/lib/<arch>, cmake also searches <root>/<prefix>.
-# Adding the API specific path to the beginning of CMAKE_SYSTEM_PREFIX_PATH, to
-# make sure it is searched first.
-set(CMAKE_SYSTEM_PREFIX_PATH
-  "/usr/lib/${ANDROID_TOOLCHAIN_NAME}/${ANDROID_PLATFORM_LEVEL}"
-  "${CMAKE_SYSTEM_PREFIX_PATH}"
-  )
-
-set(ANDROID_HOST_PREBUILTS "${ANDROID_NDK}/prebuilt/${ANDROID_HOST_TAG}")
-
-set(ANDROID_C_COMPILER
-  "${ANDROID_TOOLCHAIN_ROOT}/bin/clang${ANDROID_TOOLCHAIN_SUFFIX}")
-set(ANDROID_CXX_COMPILER
-  "${ANDROID_TOOLCHAIN_ROOT}/bin/clang++${ANDROID_TOOLCHAIN_SUFFIX}")
-set(ANDROID_ASM_COMPILER
-  "${ANDROID_TOOLCHAIN_ROOT}/bin/clang${ANDROID_TOOLCHAIN_SUFFIX}")
-set(CMAKE_C_COMPILER_TARGET   ${ANDROID_LLVM_TRIPLE})
-set(CMAKE_CXX_COMPILER_TARGET ${ANDROID_LLVM_TRIPLE})
-set(CMAKE_ASM_COMPILER_TARGET ${ANDROID_LLVM_TRIPLE})
-set(ANDROID_AR
-  "${ANDROID_TOOLCHAIN_ROOT}/bin/llvm-ar${ANDROID_TOOLCHAIN_SUFFIX}")
-set(ANDROID_RANLIB
-  "${ANDROID_TOOLCHAIN_ROOT}/bin/llvm-ranlib${ANDROID_TOOLCHAIN_SUFFIX}")
-set(ANDROID_STRIP
-  "${ANDROID_TOOLCHAIN_ROOT}/bin/llvm-strip${ANDROID_TOOLCHAIN_SUFFIX}")
-
-if(${CMAKE_VERSION} VERSION_LESS "3.19")
-    # Older CMake won't pass -target when running the compiler identification
-    # test, which causes the test to fail on flags like -mthumb.
-    # https://github.com/android/ndk/issues/1427
-    message(WARNING "An old version of CMake is being used that cannot "
-      "automatically detect compiler attributes. Compiler identification is "
-      "being bypassed. Some values may be wrong or missing. Update to CMake "
-      "3.19 or newer to use CMake's built-in compiler identification.")
-    set(CMAKE_C_COMPILER_ID_RUN TRUE)
-    set(CMAKE_CXX_COMPILER_ID_RUN TRUE)
-    set(CMAKE_C_COMPILER_ID Clang)
-    set(CMAKE_CXX_COMPILER_ID Clang)
-    # No need to auto-detect the computed standard defaults because CMake 3.6
-    # doesn't know about anything past C11 or C++14 (neither does 3.10, so no
-    # need to worry about 3.7-3.9), and any higher standards that Clang might
-    # use are clamped to those values.
-    set(CMAKE_C_STANDARD_COMPUTED_DEFAULT 11)
-    set(CMAKE_CXX_STANDARD_COMPUTED_DEFAULT 14)
-    set(CMAKE_C_COMPILER_FRONTEND_VARIANT "GNU")
-    set(CMAKE_CXX_COMPILER_FRONTEND_VARIANT "GNU")
-    include(${ANDROID_NDK}/build/cmake/compiler_id.cmake)
-endif()
-
-# Generic flags.
-list(APPEND ANDROID_COMPILER_FLAGS
-  -g
-  -DANDROID
-  -fdata-sections
-  -ffunction-sections
-  -funwind-tables
-  -fstack-protector-strong
-  -no-canonical-prefixes)
-
-if(ANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES)
-  list(APPEND ANDROID_COMPILER_FLAGS -D__BIONIC_NO_PAGE_SIZE_MACRO)
-  if(ANDROID_ABI STREQUAL arm64-v8a)
-    list(APPEND ANDROID_LINKER_FLAGS -Wl,-z,max-page-size=16384)
-  endif()
-endif()
-
-if(ANDROID_WEAK_API_DEFS)
-  list(APPEND ANDROID_COMPILER_FLAGS
-      -D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__
-      -Werror=unguarded-availability)
-endif()
-
-if("hwaddress" IN_LIST ANDROID_SANITIZE)
-  list(APPEND ANDROID_COMPILER_FLAGS -fsanitize=hwaddress -fno-omit-frame-pointer)
-  list(APPEND ANDROID_LINKER_FLAGS -fsanitize=hwaddress)
-endif()
-
-if("memtag" IN_LIST ANDROID_SANITIZE)
-  list(APPEND ANDROID_COMPILER_FLAGS -fsanitize=memtag-stack -fno-omit-frame-pointer)
-  list(APPEND ANDROID_LINKER_FLAGS -fsanitize=memtag-stack,memtag-heap -fsanitize-memtag-mode=sync)
-  if(ANDROID_ABI STREQUAL arm64-v8a)
-    list(APPEND ANDROID_COMPILER_FLAGS -march=armv8-a+memtag)
-    list(APPEND ANDROID_LINKER_FLAGS -march=armv8-a+memtag)
-  endif()
-endif()
-
-# https://github.com/android/ndk/issues/885
-# If we're using LLD we need to use a slower build-id algorithm to work around
-# the old version of LLDB in Android Studio, which doesn't understand LLD's
-# default hash ("fast").
-list(APPEND ANDROID_LINKER_FLAGS -Wl,--build-id=sha1)
-if(ANDROID_PLATFORM_LEVEL LESS 30)
-  # https://github.com/android/ndk/issues/1196
-  # https://github.com/android/ndk/issues/1589
-  list(APPEND ANDROID_LINKER_FLAGS -Wl,--no-rosegment)
-endif()
-
-if (NOT ANDROID_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS)
-  list(APPEND ANDROID_LINKER_FLAGS -Wl,--no-undefined-version)
-endif()
-
-list(APPEND ANDROID_LINKER_FLAGS -Wl,--fatal-warnings)
-
-# --gc-sections should not be present for debug builds because that can strip
-# functions that the user may want to evaluate while debugging.
-list(APPEND ANDROID_LINKER_FLAGS_RELEASE -Wl,--gc-sections)
-list(APPEND ANDROID_LINKER_FLAGS_RELWITHDEBINFO -Wl,--gc-sections)
-list(APPEND ANDROID_LINKER_FLAGS_MINSIZEREL -Wl,--gc-sections)
-
-# Debug and release flags.
-list(APPEND ANDROID_COMPILER_FLAGS_RELEASE -O3)
-list(APPEND ANDROID_COMPILER_FLAGS_RELEASE -DNDEBUG)
-if(ANDROID_TOOLCHAIN STREQUAL clang)
-  list(APPEND ANDROID_COMPILER_FLAGS_DEBUG -fno-limit-debug-info)
-endif()
-
-# Toolchain and ABI specific flags.
-if(ANDROID_ABI STREQUAL x86 AND ANDROID_PLATFORM_LEVEL LESS 24)
-  # http://b.android.com/222239
-  # http://b.android.com/220159 (internal http://b/31809417)
-  # x86 devices have stack alignment issues.
-  list(APPEND ANDROID_COMPILER_FLAGS -mstackrealign)
-endif()
-
-list(APPEND ANDROID_COMPILER_FLAGS -D_FORTIFY_SOURCE=2)
-
-set(CMAKE_C_STANDARD_LIBRARIES_INIT "-latomic -lm")
-set(CMAKE_CXX_STANDARD_LIBRARIES_INIT "${CMAKE_C_STANDARD_LIBRARIES_INIT}")
-if(ANDROID_CXX_STANDARD_LIBRARIES)
-  string(REPLACE ";" "\" \"" ANDROID_CXX_STANDARD_LIBRARIES "\"${ANDROID_CXX_STANDARD_LIBRARIES}\"")
-  set(CMAKE_CXX_STANDARD_LIBRARIES_INIT "${CMAKE_CXX_STANDARD_LIBRARIES_INIT} ${ANDROID_CXX_STANDARD_LIBRARIES}")
-endif()
-
-# Configuration specific flags.
-
-# PIE is supported on all currently supported Android releases, but it is not
-# supported with static executables, so we still provide ANDROID_PIE as an
-# escape hatch for those.
-if(ANDROID_PIE)
-  set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
-endif()
-
-if(ANDROID_CPP_FEATURES)
-  separate_arguments(ANDROID_CPP_FEATURES)
-  foreach(feature ${ANDROID_CPP_FEATURES})
-    if(NOT ${feature} MATCHES "^(rtti|exceptions|no-rtti|no-exceptions)$")
-      message(FATAL_ERROR "Invalid Android C++ feature: ${feature}.")
-    endif()
-    list(APPEND ANDROID_COMPILER_FLAGS_CXX
-      -f${feature})
-  endforeach()
-  string(REPLACE ";" " " ANDROID_CPP_FEATURES "${ANDROID_CPP_FEATURES}")
-endif()
-if(NOT ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  list(APPEND ANDROID_LINKER_FLAGS
-    -Wl,--no-undefined)
-endif()
-if(ANDROID_ABI MATCHES "armeabi")
-  # Clang does not set this up properly when using -fno-integrated-as.
-  # https://github.com/android-ndk/ndk/issues/906
-  list(APPEND ANDROID_COMPILER_FLAGS "-march=armv7-a")
-  if(ANDROID_ARM_MODE STREQUAL thumb)
-    list(APPEND ANDROID_COMPILER_FLAGS -mthumb)
-  elseif(ANDROID_ARM_MODE STREQUAL arm)
-    # Default behavior.
-  else()
-    message(FATAL_ERROR "Invalid Android ARM mode: ${ANDROID_ARM_MODE}.")
-  endif()
-endif()
-
-# CMake automatically forwards all compiler flags to the linker, and clang
-# doesn't like having -Wa flags being used for linking. To prevent CMake from
-# doing this would require meddling with the CMAKE_<LANG>_COMPILE_OBJECT rules,
-# which would get quite messy.
-list(APPEND ANDROID_LINKER_FLAGS -Qunused-arguments)
-
-if(ANDROID_DISABLE_FORMAT_STRING_CHECKS)
-  list(APPEND ANDROID_COMPILER_FLAGS
-    -Wno-error=format-security)
-else()
-  list(APPEND ANDROID_COMPILER_FLAGS
-    -Wformat -Werror=format-security)
-endif()
-
-# Convert these lists into strings.
-string(REPLACE ";" " " ANDROID_COMPILER_FLAGS         "${ANDROID_COMPILER_FLAGS}")
-string(REPLACE ";" " " ANDROID_COMPILER_FLAGS_CXX     "${ANDROID_COMPILER_FLAGS_CXX}")
-string(REPLACE ";" " " ANDROID_COMPILER_FLAGS_DEBUG   "${ANDROID_COMPILER_FLAGS_DEBUG}")
-string(REPLACE ";" " " ANDROID_COMPILER_FLAGS_RELEASE "${ANDROID_COMPILER_FLAGS_RELEASE}")
-string(REPLACE ";" " " ANDROID_LINKER_FLAGS           "${ANDROID_LINKER_FLAGS}")
-string(REPLACE ";" " " ANDROID_LINKER_FLAGS_EXE       "${ANDROID_LINKER_FLAGS_EXE}")
-string(REPLACE ";" " " ANDROID_LINKER_FLAGS_RELEASE   "${ANDROID_LINKER_FLAGS_RELEASE}")
-string(REPLACE ";" " " ANDROID_LINKER_FLAGS_RELWITHDEBINFO "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO}")
-string(REPLACE ";" " " ANDROID_LINKER_FLAGS_MINSIZEREL "${ANDROID_LINKER_FLAGS_MINSIZEREL}")
-
-if(ANDROID_CCACHE)
-  set(CMAKE_C_COMPILER_LAUNCHER   "${ANDROID_CCACHE}")
-  set(CMAKE_CXX_COMPILER_LAUNCHER "${ANDROID_CCACHE}")
-endif()
-set(CMAKE_C_COMPILER "${ANDROID_C_COMPILER}")
-set(CMAKE_CXX_COMPILER "${ANDROID_CXX_COMPILER}")
-set(CMAKE_AR "${ANDROID_AR}" CACHE FILEPATH "Archiver")
-set(CMAKE_RANLIB "${ANDROID_RANLIB}" CACHE FILEPATH "Ranlib")
-set(CMAKE_STRIP "${ANDROID_STRIP}" CACHE FILEPATH "Strip")
-
-if(ANDROID_ABI STREQUAL "x86" OR ANDROID_ABI STREQUAL "x86_64")
-  set(CMAKE_ASM_NASM_COMPILER
-    "${ANDROID_TOOLCHAIN_ROOT}/bin/yasm${ANDROID_TOOLCHAIN_SUFFIX}")
-  set(CMAKE_ASM_NASM_COMPILER_ARG1 "-DELF")
-endif()
-
-# Set or retrieve the cached flags.
-# This is necessary in case the user sets/changes flags in subsequent
-# configures. If we included the Android flags in here, they would get
-# overwritten.
-set(CMAKE_C_FLAGS ""
-  CACHE STRING "Flags used by the compiler during all build types.")
-set(CMAKE_CXX_FLAGS ""
-  CACHE STRING "Flags used by the compiler during all build types.")
-set(CMAKE_ASM_FLAGS ""
-  CACHE STRING "Flags used by the compiler during all build types.")
-set(CMAKE_C_FLAGS_DEBUG ""
-  CACHE STRING "Flags used by the compiler during debug builds.")
-set(CMAKE_CXX_FLAGS_DEBUG ""
-  CACHE STRING "Flags used by the compiler during debug builds.")
-set(CMAKE_ASM_FLAGS_DEBUG ""
-  CACHE STRING "Flags used by the compiler during debug builds.")
-set(CMAKE_C_FLAGS_RELEASE ""
-  CACHE STRING "Flags used by the compiler during release builds.")
-set(CMAKE_CXX_FLAGS_RELEASE ""
-  CACHE STRING "Flags used by the compiler during release builds.")
-set(CMAKE_ASM_FLAGS_RELEASE ""
-  CACHE STRING "Flags used by the compiler during release builds.")
-set(CMAKE_MODULE_LINKER_FLAGS ""
-  CACHE STRING "Flags used by the linker during the creation of modules.")
-set(CMAKE_SHARED_LINKER_FLAGS ""
-  CACHE STRING "Flags used by the linker during the creation of dll's.")
-set(CMAKE_EXE_LINKER_FLAGS ""
-  CACHE STRING "Flags used by the linker.")
-
-set(CMAKE_C_FLAGS             "${ANDROID_COMPILER_FLAGS} ${CMAKE_C_FLAGS}")
-set(CMAKE_CXX_FLAGS           "${ANDROID_COMPILER_FLAGS} ${ANDROID_COMPILER_FLAGS_CXX} ${CMAKE_CXX_FLAGS}")
-set(CMAKE_ASM_FLAGS           "${ANDROID_COMPILER_FLAGS} ${CMAKE_ASM_FLAGS}")
-set(CMAKE_C_FLAGS_DEBUG       "${ANDROID_COMPILER_FLAGS_DEBUG} ${CMAKE_C_FLAGS_DEBUG}")
-set(CMAKE_CXX_FLAGS_DEBUG     "${ANDROID_COMPILER_FLAGS_DEBUG} ${CMAKE_CXX_FLAGS_DEBUG}")
-set(CMAKE_ASM_FLAGS_DEBUG     "${ANDROID_COMPILER_FLAGS_DEBUG} ${CMAKE_ASM_FLAGS_DEBUG}")
-set(CMAKE_C_FLAGS_RELEASE     "${ANDROID_COMPILER_FLAGS_RELEASE} ${CMAKE_C_FLAGS_RELEASE}")
-set(CMAKE_CXX_FLAGS_RELEASE   "${ANDROID_COMPILER_FLAGS_RELEASE} ${CMAKE_CXX_FLAGS_RELEASE}")
-set(CMAKE_ASM_FLAGS_RELEASE   "${ANDROID_COMPILER_FLAGS_RELEASE} ${CMAKE_ASM_FLAGS_RELEASE}")
-set(CMAKE_SHARED_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS}")
-set(CMAKE_MODULE_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${CMAKE_MODULE_LINKER_FLAGS}")
-set(CMAKE_EXE_LINKER_FLAGS    "${ANDROID_LINKER_FLAGS} ${ANDROID_LINKER_FLAGS_EXE} ${CMAKE_EXE_LINKER_FLAGS}")
-set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${ANDROID_LINKER_FLAGS_RELEASE} ${CMAKE_SHARED_LINKER_FLAGS_RELEASE}")
-set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${ANDROID_LINKER_FLAGS_RELEASE} ${CMAKE_MODULE_LINKER_FLAGS_RELEASE}")
-set(CMAKE_EXE_LINKER_FLAGS_RELEASE    "${ANDROID_LINKER_FLAGS_RELEASE} ${CMAKE_EXE_LINKER_FLAGS_RELEASE}")
-set(CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO} ${CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO}")
-set(CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO} ${CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO}")
-set(CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO    "${ANDROID_LINKER_FLAGS_RELWITHDEBINFO} ${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO}")
-set(CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL "${ANDROID_LINKER_FLAGS_MINSIZEREL} ${CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL}")
-set(CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL "${ANDROID_LINKER_FLAGS_MINSIZEREL} ${CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL}")
-set(CMAKE_EXE_LINKER_FLAGS_MINSIZEREL    "${ANDROID_LINKER_FLAGS_MINSIZEREL} ${CMAKE_EXE_LINKER_FLAGS_MINSIZEREL}")
-
-# Compatibility for read-only variables.
-# Read-only variables for compatibility with the other toolchain file.
-# We'll keep these around for the existing projects that still use them.
-# TODO: All of the variables here have equivalents in our standard set of
-# configurable variables, so we can remove these once most of our users migrate
-# to those variables.
-set(ANDROID_NATIVE_API_LEVEL ${ANDROID_PLATFORM_LEVEL})
-if(ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  set(ANDROID_SO_UNDEFINED TRUE)
-else()
-  set(ANDROID_NO_UNDEFINED TRUE)
-endif()
-set(ANDROID_FUNCTION_LEVEL_LINKING TRUE)
-set(ANDROID_GOLD_LINKER TRUE)
-set(ANDROID_NOEXECSTACK TRUE)
-set(ANDROID_RELRO TRUE)
-if(ANDROID_ARM_MODE STREQUAL arm)
-  set(ANDROID_FORCE_ARM_BUILD TRUE)
-endif()
-if(ANDROID_CPP_FEATURES MATCHES "rtti"
-    AND ANDROID_CPP_FEATURES MATCHES "exceptions")
-  set(ANDROID_STL_FORCE_FEATURES TRUE)
-endif()
-if(ANDROID_CCACHE)
-  set(NDK_CCACHE "${ANDROID_CCACHE}")
-endif()
-if(ANDROID_TOOLCHAIN STREQUAL clang)
-  set(ANDROID_TOOLCHAIN_NAME ${ANDROID_TOOLCHAIN_NAME}-clang)
-else()
-  set(ANDROID_TOOLCHAIN_NAME ${ANDROID_TOOLCHAIN_NAME}-4.9)
-endif()
-set(ANDROID_NDK_HOST_X64 TRUE)
-set(ANDROID_NDK_LAYOUT RELEASE)
-if(ANDROID_ABI STREQUAL armeabi-v7a)
-  set(ARMEABI_V7A TRUE)
-  if(ANDROID_ARM_NEON)
-    set(NEON TRUE)
-  endif()
-elseif(ANDROID_ABI STREQUAL arm64-v8a)
-  set(ARM64_V8A TRUE)
-elseif(ANDROID_ABI STREQUAL x86)
-  set(X86 TRUE)
-elseif(ANDROID_ABI STREQUAL x86_64)
-  set(X86_64 TRUE)
-elseif(ANDROID_ABI STREQUAL riscv64)
-  set(RISCV64 TRUE)
-endif()
-set(ANDROID_NDK_HOST_SYSTEM_NAME ${ANDROID_HOST_TAG})
-set(ANDROID_NDK_ABI_NAME ${ANDROID_ABI})
-set(ANDROID_NDK_RELEASE r${ANDROID_NDK_REVISION})
-set(ANDROID_ARCH_NAME ${ANDROID_SYSROOT_ABI})
-set(TOOL_OS_SUFFIX ${ANDROID_TOOLCHAIN_SUFFIX})
-if(ANDROID_TOOLCHAIN STREQUAL clang)
-  set(ANDROID_COMPILER_IS_CLANG TRUE)
-endif()
-
-# CMake 3.7+ compatibility.
-if (CMAKE_VERSION VERSION_GREATER 3.7.0)
-  set(CMAKE_ANDROID_NDK ${ANDROID_NDK})
-  set(CMAKE_ANDROID_NDK_TOOLCHAIN_VERSION clang)
-
-  set(CMAKE_ANDROID_STL_TYPE ${ANDROID_STL})
-
-  if(ANDROID_ABI MATCHES "^armeabi(-v7a)?$")
-    set(CMAKE_ANDROID_ARM_NEON ${ANDROID_ARM_NEON})
-    set(CMAKE_ANDROID_ARM_MODE ${ANDROID_ARM_MODE})
-  endif()
-
-  # https://github.com/android/ndk/issues/861
-  if(ANDROID_ABI STREQUAL armeabi-v7a)
-    set(CMAKE_ANDROID_ARCH arm)
-  elseif(ANDROID_ABI STREQUAL arm64-v8a)
-    set(CMAKE_ANDROID_ARCH arm64)
-  elseif(ANDROID_ABI STREQUAL x86)
-    set(CMAKE_ANDROID_ARCH x86)
-  elseif(ANDROID_ABI STREQUAL x86_64)
-    set(CMAKE_ANDROID_ARCH x86_64)
-  elseif(ANDROID_ABI STREQUAL riscv64)
-    set(CMAKE_ANDROID_ARCH riscv64)
-  endif()
-
-  # https://github.com/android/ndk/issues/1012
-  set(CMAKE_ASM_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
-  set(CMAKE_C_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
-  set(CMAKE_CXX_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
-
-  set(CMAKE_ASM_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
-  set(CMAKE_C_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
-  set(CMAKE_CXX_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
-endif()
diff --git a/build/cmake/android.toolchain.cmake b/build/cmake/android.toolchain.cmake
index 48cee3f..cf5312f 100644
--- a/build/cmake/android.toolchain.cmake
+++ b/build/cmake/android.toolchain.cmake
@@ -28,12 +28,15 @@
 # ANDROID_CPP_FEATURES
 # ANDROID_ALLOW_UNDEFINED_SYMBOLS
 # ANDROID_ARM_MODE
+# ANDROID_ARM_NEON
 # ANDROID_DISABLE_FORMAT_STRING_CHECKS
 # ANDROID_CCACHE
-# ANDROID_SANITIZE
 
 cmake_minimum_required(VERSION 3.6.0)
 
+# Inhibit all of CMake's own NDK handling code.
+set(CMAKE_SYSTEM_VERSION 1)
+
 # CMake invokes the toolchain file twice during the first build, but only once
 # during subsequent rebuilds. This was causing the various flags to be added
 # twice on the first build, and on a rebuild ninja would see only one set of the
@@ -44,23 +47,11 @@
 endif(ANDROID_NDK_TOOLCHAIN_INCLUDED)
 set(ANDROID_NDK_TOOLCHAIN_INCLUDED true)
 
-if(DEFINED ANDROID_USE_LEGACY_TOOLCHAIN_FILE)
-  set(_USE_LEGACY_TOOLCHAIN_FILE ${ANDROID_USE_LEGACY_TOOLCHAIN_FILE})
-else()
-  # Default to the legacy toolchain file to avoid changing the behavior of
-  # CMAKE_CXX_FLAGS. See https://github.com/android/ndk/issues/1693.
-  set(_USE_LEGACY_TOOLCHAIN_FILE true)
-endif()
-if(_USE_LEGACY_TOOLCHAIN_FILE)
-  include("${CMAKE_CURRENT_LIST_DIR}/android-legacy.toolchain.cmake")
-  return()
-endif()
-
-# Android NDK path
+# Android NDK
 get_filename_component(ANDROID_NDK_EXPECTED_PATH
     "${CMAKE_CURRENT_LIST_DIR}/../.." ABSOLUTE)
 if(NOT ANDROID_NDK)
-  set(CMAKE_ANDROID_NDK "${ANDROID_NDK_EXPECTED_PATH}")
+  set(ANDROID_NDK "${ANDROID_NDK_EXPECTED_PATH}")
 else()
   # Allow the user to specify their own NDK path, but emit a warning. This is an
   # uncommon use case, but helpful if users want to use a bleeding edge
@@ -70,10 +61,9 @@
   if(NOT "${ANDROID_NDK}" STREQUAL "${ANDROID_NDK_EXPECTED_PATH}")
     message(WARNING "Using custom NDK path (ANDROID_NDK is set): ${ANDROID_NDK}")
   endif()
-  set(CMAKE_ANDROID_NDK ${ANDROID_NDK})
 endif()
 unset(ANDROID_NDK_EXPECTED_PATH)
-file(TO_CMAKE_PATH "${CMAKE_ANDROID_NDK}" CMAKE_ANDROID_NDK)
+file(TO_CMAKE_PATH "${ANDROID_NDK}" ANDROID_NDK)
 
 # Android NDK revision
 # Possible formats:
@@ -82,12 +72,12 @@
 # * r16 beta 1, build 1234: 16.0.1234-beta1
 #
 # Canary builds are not specially marked.
-file(READ "${CMAKE_ANDROID_NDK}/source.properties" ANDROID_NDK_SOURCE_PROPERTIES)
+file(READ "${ANDROID_NDK}/source.properties" ANDROID_NDK_SOURCE_PROPERTIES)
 
 set(ANDROID_NDK_REVISION_REGEX
   "^Pkg\\.Desc = Android NDK\nPkg\\.Revision = ([0-9]+)\\.([0-9]+)\\.([0-9]+)(-beta([0-9]+))?")
 if(NOT ANDROID_NDK_SOURCE_PROPERTIES MATCHES "${ANDROID_NDK_REVISION_REGEX}")
-  message(SEND_ERROR "Failed to parse Android NDK revision: ${CMAKE_ANDROID_NDK}/source.properties.\n${ANDROID_NDK_SOURCE_PROPERTIES}")
+  message(SEND_ERROR "Failed to parse Android NDK revision: ${ANDROID_NDK}/source.properties.\n${ANDROID_NDK_SOURCE_PROPERTIES}")
 endif()
 
 set(ANDROID_NDK_MAJOR "${CMAKE_MATCH_1}")
@@ -105,61 +95,69 @@
 if(CMAKE_TOOLCHAIN_FILE)
 endif()
 
-# Determine the ABI.
-if(NOT CMAKE_ANDROID_ARCH_ABI)
-  if(ANDROID_ABI STREQUAL "armeabi-v7a with NEON")
-    set(CMAKE_ANDROID_ARCH_ABI armeabi-v7a)
-  elseif(ANDROID_ABI)
-    set(CMAKE_ANDROID_ARCH_ABI ${ANDROID_ABI})
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^arm-linux-androideabi-")
-    set(CMAKE_ANDROID_ARCH_ABI armeabi-v7a)
+# Compatibility for configurable variables.
+# Compatible with configurable variables from the other toolchain file:
+#         https://github.com/taka-no-me/android-cmake
+# TODO: We should consider dropping compatibility to simplify things once most
+# of our users have migrated to our standard set of configurable variables.
+if(ANDROID_TOOLCHAIN_NAME AND NOT ANDROID_TOOLCHAIN)
+  if(ANDROID_TOOLCHAIN_NAME MATCHES "-clang([0-9].[0-9])?$")
+    set(ANDROID_TOOLCHAIN clang)
+  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "-[0-9].[0-9]$")
+    set(ANDROID_TOOLCHAIN gcc)
+  endif()
+endif()
+if(ANDROID_ABI STREQUAL "armeabi-v7a with NEON")
+  set(ANDROID_ABI armeabi-v7a)
+  set(ANDROID_ARM_NEON TRUE)
+elseif(ANDROID_TOOLCHAIN_NAME AND NOT ANDROID_ABI)
+  if(ANDROID_TOOLCHAIN_NAME MATCHES "^arm-linux-androideabi-")
+    set(ANDROID_ABI armeabi-v7a)
   elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^aarch64-linux-android-")
-    set(CMAKE_ANDROID_ARCH_ABI arm64-v8a)
+    set(ANDROID_ABI arm64-v8a)
   elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^x86-")
-    set(CMAKE_ANDROID_ARCH_ABI x86)
+    set(ANDROID_ABI x86)
   elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^x86_64-")
-    set(CMAKE_ANDROID_ARCH_ABI x86_64)
-  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^riscv64-")
-    set(CMAKE_ANDROID_ARCH_ABI riscv64)
+    set(ANDROID_ABI x86_64)
+  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^mipsel-linux-android-")
+    set(ANDROID_ABI mips)
+  elseif(ANDROID_TOOLCHAIN_NAME MATCHES "^mips64el-linux-android-")
+    set(ANDROID_ABI mips64)
+  endif()
+endif()
+if(ANDROID_NATIVE_API_LEVEL AND NOT ANDROID_PLATFORM)
+  if(ANDROID_NATIVE_API_LEVEL MATCHES "^android-[0-9]+$")
+    set(ANDROID_PLATFORM ${ANDROID_NATIVE_API_LEVEL})
+  elseif(ANDROID_NATIVE_API_LEVEL MATCHES "^[0-9]+$")
+    set(ANDROID_PLATFORM android-${ANDROID_NATIVE_API_LEVEL})
+  endif()
+endif()
+if(DEFINED ANDROID_APP_PIE AND NOT DEFINED ANDROID_PIE)
+  set(ANDROID_PIE "${ANDROID_APP_PIE}")
+endif()
+if(ANDROID_STL_FORCE_FEATURES AND NOT DEFINED ANDROID_CPP_FEATURES)
+  set(ANDROID_CPP_FEATURES "rtti exceptions")
+endif()
+if(DEFINED ANDROID_NO_UNDEFINED AND NOT DEFINED ANDROID_ALLOW_UNDEFINED_SYMBOLS)
+  if(ANDROID_NO_UNDEFINED)
+    set(ANDROID_ALLOW_UNDEFINED_SYMBOLS FALSE)
   else()
-    set(CMAKE_ANDROID_ARCH_ABI armeabi-v7a)
+    set(ANDROID_ALLOW_UNDEFINED_SYMBOLS TRUE)
   endif()
 endif()
-
-if(DEFINED ANDROID_ARM_NEON AND NOT ANDROID_ARM_NEON)
-  message(FATAL_ERROR "Disabling Neon is no longer supported")
+if(DEFINED ANDROID_SO_UNDEFINED AND NOT DEFINED ANDROID_ALLOW_UNDEFINED_SYMBOLS)
+  set(ANDROID_ALLOW_UNDEFINED_SYMBOLS "${ANDROID_SO_UNDEFINED}")
 endif()
-
-if(CMAKE_ANDROID_ARCH_ABI STREQUAL "armeabi-v7a")
-  set(CMAKE_ANDROID_ARM_NEON TRUE)
-
-  if(NOT DEFINED CMAKE_ANDROID_ARM_MODE)
-    if(DEFINED ANDROID_FORCE_ARM_BUILD)
-      set(CMAKE_ANDROID_ARM_MODE ${ANDROID_FORCE_ARM_BUILD})
-    elseif(DEFINED ANDROID_ARM_MODE)
-      if(ANDROID_ARM_MODE STREQUAL "arm")
-        set(CMAKE_ANDROID_ARM_MODE TRUE)
-      elseif(ANDROID_ARM_MODE STREQUAL "thumb")
-        set(CMAKE_ANDROID_ARM_MODE FALSE)
-      else()
-        message(FATAL_ERROR "Invalid Android ARM mode: ${ANDROID_ARM_MODE}.")
-      endif()
-    endif()
-  endif()
-endif()
-
-# PIE is supported on all currently supported Android releases, but it is not
-# supported with static executables, so we still provide ANDROID_PIE as an
-# escape hatch for those.
-if(NOT DEFINED CMAKE_POSITION_INDEPENDENT_CODE)
-  if(DEFINED ANDROID_PIE)
-    set(CMAKE_POSITION_INDEPENDENT_CODE ${ANDROID_PIE})
-  elseif(DEFINED ANDROID_APP_PIE)
-    set(CMAKE_POSITION_INDEPENDENT_CODE ${ANDROID_APP_PIE})
+if(DEFINED ANDROID_FORCE_ARM_BUILD AND NOT ANDROID_ARM_MODE)
+  if(ANDROID_FORCE_ARM_BUILD)
+    set(ANDROID_ARM_MODE arm)
   else()
-    set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
+    set(ANDROID_ARM_MODE thumb)
   endif()
 endif()
+if(NDK_CCACHE AND NOT ANDROID_CCACHE)
+  set(ANDROID_CCACHE "${NDK_CCACHE}")
+endif()
 
 # Default values for configurable variables.
 if(NOT ANDROID_TOOLCHAIN)
@@ -168,77 +166,96 @@
   message(FATAL_ERROR "GCC is no longer supported. See "
   "https://android.googlesource.com/platform/ndk/+/master/docs/ClangMigration.md.")
 endif()
-
-if(ANDROID_NATIVE_API_LEVEL AND NOT ANDROID_PLATFORM)
-  if(ANDROID_NATIVE_API_LEVEL MATCHES "^android-[0-9]+$")
-    set(ANDROID_PLATFORM ${ANDROID_NATIVE_API_LEVEL})
-  elseif(ANDROID_NATIVE_API_LEVEL MATCHES "^[0-9]+$")
-    set(ANDROID_PLATFORM android-${ANDROID_NATIVE_API_LEVEL})
-  endif()
-endif()
-include(${CMAKE_ANDROID_NDK}/build/cmake/adjust_api_level.cmake)
-adjust_api_level("${ANDROID_PLATFORM}" CMAKE_SYSTEM_VERSION)
-
-if(NOT DEFINED CMAKE_ANDROID_STL_TYPE AND DEFINED ANDROID_STL)
-  set(CMAKE_ANDROID_STL_TYPE ${ANDROID_STL})
+if(NOT ANDROID_ABI)
+  set(ANDROID_ABI armeabi-v7a)
 endif()
 
-if("hwaddress" IN_LIST ANDROID_SANITIZE AND "${CMAKE_ANDROID_STL_TYPE}" STREQUAL "c++_static")
+if(ANDROID_ABI STREQUAL armeabi)
+  message(FATAL_ERROR "armeabi is no longer supported. Use armeabi-v7a.")
+elseif(ANDROID_ABI MATCHES "^(mips|mips64)$")
+  message(FATAL_ERROR "MIPS and MIPS64 are no longer supported.")
+endif()
+
+if(ANDROID_ABI STREQUAL armeabi-v7a AND NOT DEFINED ANDROID_ARM_NEON)
+  set(ANDROID_ARM_NEON TRUE)
+endif()
+
+include(${ANDROID_NDK}/build/cmake/platforms.cmake)
+
+# If no platform version was chosen by the user, default to the minimum version
+# supported by this NDK.
+if(NOT ANDROID_PLATFORM)
+  message(STATUS "\
+ANDROID_PLATFORM not set. Defaulting to minimum supported version
+${NDK_MIN_PLATFORM_LEVEL}.")
+
+  set(ANDROID_PLATFORM "android-${NDK_MIN_PLATFORM_LEVEL}")
+endif()
+
+if(ANDROID_PLATFORM STREQUAL "latest")
+  message(STATUS
+    "Using latest available ANDROID_PLATFORM: ${NDK_MAX_PLATFORM_LEVEL}.")
+  set(ANDROID_PLATFORM "android-${NDK_MAX_PLATFORM_LEVEL}")
+  string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
+endif()
+
+string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
+
+# Aliases defined by meta/platforms.json include codename aliases for platform
+# API levels as well as cover any gaps in platforms that may not have had NDK
+# APIs.
+if(NOT "${NDK_PLATFORM_ALIAS_${ANDROID_PLATFORM_LEVEL}}" STREQUAL "")
+  message(STATUS "\
+${ANDROID_PLATFORM} is an alias for \
+${NDK_PLATFORM_ALIAS_${ANDROID_PLATFORM_LEVEL}}. Adjusting ANDROID_PLATFORM to \
+match.")
+  set(ANDROID_PLATFORM "${NDK_PLATFORM_ALIAS_${ANDROID_PLATFORM_LEVEL}}")
+  string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
+endif()
+
+# Pull up to the minimum supported version if an old API level was requested.
+if(ANDROID_PLATFORM_LEVEL LESS NDK_MIN_PLATFORM_LEVEL)
+  message(STATUS "\
+${ANDROID_PLATFORM} is unsupported. Using minimum supported version \
+${NDK_MIN_PLATFORM_LEVEL}.")
+  set(ANDROID_PLATFORM "android-${NDK_MIN_PLATFORM_LEVEL}")
+  string(REPLACE "android-" "" ANDROID_PLATFORM_LEVEL ${ANDROID_PLATFORM})
+endif()
+
+# And for LP64 we need to pull up to 21. No diagnostic is provided here because
+# minSdkVersion < 21 is valid for the project even though it may not be for this
+# ABI.
+if(ANDROID_ABI MATCHES "64(-v8a)?$" AND ANDROID_PLATFORM_LEVEL LESS 21)
+  set(ANDROID_PLATFORM android-21)
+  set(ANDROID_PLATFORM_LEVEL 21)
+endif()
+
+# ANDROID_PLATFORM beyond the maximum is an error. The correct way to specify
+# the latest version is ANDROID_PLATFORM=latest.
+if(ANDROID_PLATFORM_LEVEL GREATER NDK_MAX_PLATFORM_LEVEL)
+  message(SEND_ERROR "\
+${ANDROID_PLATFORM} is above the maximum supported version \
+${NDK_MAX_PLATFORM_LEVEL}. Choose a supported API level or set \
+ANDROID_PLATFORM to \"latest\".")
+endif()
+
+if(NOT ANDROID_STL)
+  set(ANDROID_STL c++_static)
+endif()
+
+if("${ANDROID_STL}" STREQUAL "gnustl_shared" OR
+    "${ANDROID_STL}" STREQUAL "gnustl_static" OR
+    "${ANDROID_STL}" STREQUAL "stlport_shared" OR
+    "${ANDROID_STL}" STREQUAL "stlport_static")
   message(FATAL_ERROR "\
-  hwaddress does not support c++_static. Use system or c++_shared.")
-endif()
-
-if("${CMAKE_ANDROID_STL_TYPE}" STREQUAL "gnustl_shared" OR
-    "${CMAKE_ANDROID_STL_TYPE}" STREQUAL "gnustl_static" OR
-    "${CMAKE_ANDROID_STL_TYPE}" STREQUAL "stlport_shared" OR
-    "${CMAKE_ANDROID_STL_TYPE}" STREQUAL "stlport_static")
-  message(FATAL_ERROR "\
-${CMAKE_ANDROID_STL_TYPE} is no longer supported. Please switch to either c++_shared \
-or c++_static. See https://developer.android.com/ndk/guides/cpp-support.html \
+${ANDROID_STL} is no longer supported. Please switch to either c++_shared or \
+c++_static. See https://developer.android.com/ndk/guides/cpp-support.html \
 for more information.")
 endif()
 
-# Standard cross-compiling stuff.
-set(CMAKE_SYSTEM_NAME Android)
-
-# STL.
-if(ANDROID_STL)
-  set(CMAKE_ANDROID_STL_TYPE ${ANDROID_STL})
-endif()
-
-if(NDK_CCACHE AND NOT ANDROID_CCACHE)
-  set(ANDROID_CCACHE "${NDK_CCACHE}")
-endif()
-if(ANDROID_CCACHE)
-  set(CMAKE_C_COMPILER_LAUNCHER   "${ANDROID_CCACHE}")
-  set(CMAKE_CXX_COMPILER_LAUNCHER "${ANDROID_CCACHE}")
-endif()
-
-# Configuration specific flags.
-if(ANDROID_STL_FORCE_FEATURES AND NOT DEFINED ANDROID_CPP_FEATURES)
-  set(ANDROID_CPP_FEATURES "rtti exceptions")
-endif()
-
-if(ANDROID_CPP_FEATURES)
-  separate_arguments(ANDROID_CPP_FEATURES)
-  foreach(feature ${ANDROID_CPP_FEATURES})
-    if(NOT ${feature} MATCHES "^(rtti|exceptions|no-rtti|no-exceptions)$")
-      message(FATAL_ERROR "Invalid Android C++ feature: ${feature}.")
-    endif()
-    if(${feature} STREQUAL "rtti")
-      set(CMAKE_ANDROID_RTTI TRUE)
-    endif()
-    if(${feature} STREQUAL "no-rtti")
-      set(CMAKE_ANDROID_RTTI FALSE)
-    endif()
-    if(${feature} STREQUAL "exceptions")
-      set(CMAKE_ANDROID_EXCEPTIONS TRUE)
-    endif()
-    if(${feature} STREQUAL "no-exceptions")
-      set(CMAKE_ANDROID_EXCEPTIONS FALSE)
-    endif()
-  endforeach()
-  string(REPLACE ";" " " ANDROID_CPP_FEATURES "${ANDROID_CPP_FEATURES}")
+set(ANDROID_PIE TRUE)
+if(NOT ANDROID_ARM_MODE)
+  set(ANDROID_ARM_MODE thumb)
 endif()
 
 # Export configurable variables for the try_compile() command.
@@ -250,27 +267,108 @@
   ANDROID_CCACHE
   ANDROID_CPP_FEATURES
   ANDROID_DISABLE_FORMAT_STRING_CHECKS
+  ANDROID_LD
   ANDROID_PIE
   ANDROID_PLATFORM
   ANDROID_STL
   ANDROID_TOOLCHAIN
-  ANDROID_USE_LEGACY_TOOLCHAIN_FILE
-  ANDROID_SANITIZE
 )
 
-if(DEFINED ANDROID_NO_UNDEFINED AND NOT DEFINED ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  if(ANDROID_NO_UNDEFINED)
-    set(ANDROID_ALLOW_UNDEFINED_SYMBOLS FALSE)
-  else()
-    set(ANDROID_ALLOW_UNDEFINED_SYMBOLS TRUE)
-  endif()
-endif()
-if(DEFINED ANDROID_SO_UNDEFINED AND NOT DEFINED ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  set(ANDROID_ALLOW_UNDEFINED_SYMBOLS "${ANDROID_SO_UNDEFINED}")
+# Standard cross-compiling stuff.
+set(ANDROID TRUE)
+set(CMAKE_SYSTEM_NAME Android)
+
+# https://github.com/android-ndk/ndk/issues/890
+#
+# ONLY doesn't do anything when CMAKE_FIND_ROOT_PATH is empty. Without this,
+# CMake will wrongly search host sysroots for headers/libraries. The actual path
+# used here is fairly meaningless since CMake doesn't handle the NDK sysroot
+# layout (per-arch and per-verion subdirectories for libraries), so find_library
+# is handled separately by CMAKE_SYSTEM_LIBRARY_PATH.
+list(APPEND CMAKE_FIND_ROOT_PATH "${ANDROID_NDK}")
+
+# Allow users to override these values in case they want more strict behaviors.
+# For example, they may want to prevent the NDK's libz from being picked up so
+# they can use their own.
+# https://github.com/android-ndk/ndk/issues/517
+if(NOT CMAKE_FIND_ROOT_PATH_MODE_PROGRAM)
+  set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
 endif()
 
-# Exports compatible variables defined in exports.cmake.
-set(_ANDROID_EXPORT_COMPATIBILITY_VARIABLES TRUE)
+if(NOT CMAKE_FIND_ROOT_PATH_MODE_LIBRARY)
+  set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
+endif()
+
+if(NOT CMAKE_FIND_ROOT_PATH_MODE_INCLUDE)
+  set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
+endif()
+
+if(NOT CMAKE_FIND_ROOT_PATH_MODE_PACKAGE)
+  set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
+endif()
+
+# ABI.
+set(CMAKE_ANDROID_ARCH_ABI ${ANDROID_ABI})
+if(ANDROID_ABI STREQUAL armeabi-v7a)
+  set(ANDROID_SYSROOT_ABI arm)
+  set(ANDROID_TOOLCHAIN_NAME arm-linux-androideabi)
+  set(CMAKE_SYSTEM_PROCESSOR armv7-a)
+  set(ANDROID_LLVM_TRIPLE armv7-none-linux-androideabi)
+elseif(ANDROID_ABI STREQUAL arm64-v8a)
+  set(ANDROID_SYSROOT_ABI arm64)
+  set(CMAKE_SYSTEM_PROCESSOR aarch64)
+  set(ANDROID_TOOLCHAIN_NAME aarch64-linux-android)
+  set(ANDROID_LLVM_TRIPLE aarch64-none-linux-android)
+elseif(ANDROID_ABI STREQUAL x86)
+  set(ANDROID_SYSROOT_ABI x86)
+  set(CMAKE_SYSTEM_PROCESSOR i686)
+  set(ANDROID_TOOLCHAIN_NAME i686-linux-android)
+  set(ANDROID_LLVM_TRIPLE i686-none-linux-android)
+elseif(ANDROID_ABI STREQUAL x86_64)
+  set(ANDROID_SYSROOT_ABI x86_64)
+  set(CMAKE_SYSTEM_PROCESSOR x86_64)
+  set(ANDROID_TOOLCHAIN_NAME x86_64-linux-android)
+  set(ANDROID_LLVM_TRIPLE x86_64-none-linux-android)
+else()
+  message(FATAL_ERROR "Invalid Android ABI: ${ANDROID_ABI}.")
+endif()
+
+set(ANDROID_LLVM_TRIPLE "${ANDROID_LLVM_TRIPLE}${ANDROID_PLATFORM_LEVEL}")
+
+set(ANDROID_COMPILER_FLAGS)
+set(ANDROID_COMPILER_FLAGS_CXX)
+set(ANDROID_COMPILER_FLAGS_DEBUG)
+set(ANDROID_COMPILER_FLAGS_RELEASE)
+set(ANDROID_LINKER_FLAGS)
+set(ANDROID_LINKER_FLAGS_EXE)
+
+if(ANDROID_LD STREQUAL lld)
+  list(APPEND ANDROID_LINKER_FLAGS -fuse-ld=lld)
+endif()
+
+# Don't re-export libgcc symbols in every binary.
+list(APPEND ANDROID_LINKER_FLAGS -Wl,--exclude-libs,libgcc_real.a)
+list(APPEND ANDROID_LINKER_FLAGS -Wl,--exclude-libs,libatomic.a)
+
+# STL.
+set(ANDROID_CXX_STANDARD_LIBRARIES)
+if(ANDROID_STL STREQUAL system)
+  list(APPEND ANDROID_COMPILER_FLAGS_CXX "-stdlib=libstdc++")
+  if(NOT "x${ANDROID_CPP_FEATURES}" STREQUAL "x")
+    list(APPEND ANDROID_CXX_STANDARD_LIBRARIES "-lc++abi")
+    if(ANDROID_PLATFORM_LEVEL LESS 21)
+      list(APPEND ANDROID_CXX_STANDARD_LIBRARIES "-landroid_support")
+    endif()
+  endif()
+elseif(ANDROID_STL STREQUAL c++_static)
+  list(APPEND ANDROID_LINKER_FLAGS "-static-libstdc++")
+elseif(ANDROID_STL STREQUAL c++_shared)
+elseif(ANDROID_STL STREQUAL none)
+  list(APPEND ANDROID_COMPILER_FLAGS_CXX "-nostdinc++")
+  list(APPEND ANDROID_LINKER_FLAGS "-nostdlib++")
+else()
+  message(FATAL_ERROR "Invalid Android STL: ${ANDROID_STL}.")
+endif()
 
 if(CMAKE_HOST_SYSTEM_NAME STREQUAL Linux)
   set(ANDROID_HOST_TAG linux-x86_64)
@@ -280,12 +378,342 @@
   set(ANDROID_HOST_TAG windows-x86_64)
 endif()
 
+if(CMAKE_HOST_SYSTEM_NAME STREQUAL Windows)
+  set(ANDROID_TOOLCHAIN_SUFFIX .exe)
+endif()
+
 # Toolchain.
 set(ANDROID_TOOLCHAIN_ROOT
-  "${CMAKE_ANDROID_NDK}/toolchains/llvm/prebuilt/${ANDROID_HOST_TAG}")
+  "${ANDROID_NDK}/toolchains/llvm/prebuilt/${ANDROID_HOST_TAG}")
+set(ANDROID_TOOLCHAIN_PREFIX
+  "${ANDROID_TOOLCHAIN_ROOT}/bin/${ANDROID_TOOLCHAIN_NAME}-")
+
+list(APPEND CMAKE_PREFIX_PATH "${ANDROID_TOOLCHAIN_ROOT}")
+
+# find_library searches a handful of paths as described by
+# https://cmake.org/cmake/help/v3.6/command/find_library.html. CMake doesn't
+# understand the Android sysroot layout, so we need to give the direct path to
+# the libraries rather than just the sysroot. Set up CMAKE_SYSTEM_LIBRARY_PATH
+# (https://cmake.org/cmake/help/v3.6/variable/CMAKE_SYSTEM_LIBRARY_PATH.html)
+# instead.
 
 # NB: This variable causes CMake to automatically pass --sysroot to the
 # toolchain. Studio currently relies on this to recognize Android builds. If
 # this variable is removed, ensure that flag is still passed.
 # TODO: Teach Studio to recognize Android builds based on --target.
 set(CMAKE_SYSROOT "${ANDROID_TOOLCHAIN_ROOT}/sysroot")
+
+# Allows CMake to find headers in the architecture-specific include directories.
+set(CMAKE_LIBRARY_ARCHITECTURE "${ANDROID_TOOLCHAIN_NAME}")
+
+# Instructs CMake to search the correct API level for libraries.
+list(APPEND CMAKE_SYSTEM_LIBRARY_PATH
+  "/usr/lib/${ANDROID_TOOLCHAIN_NAME}/${ANDROID_PLATFORM_LEVEL}")
+
+set(ANDROID_HOST_PREBUILTS "${ANDROID_NDK}/prebuilt/${ANDROID_HOST_TAG}")
+
+set(ANDROID_C_COMPILER
+  "${ANDROID_TOOLCHAIN_ROOT}/bin/clang${ANDROID_TOOLCHAIN_SUFFIX}")
+set(ANDROID_CXX_COMPILER
+  "${ANDROID_TOOLCHAIN_ROOT}/bin/clang++${ANDROID_TOOLCHAIN_SUFFIX}")
+set(ANDROID_ASM_COMPILER
+  "${ANDROID_TOOLCHAIN_ROOT}/bin/clang${ANDROID_TOOLCHAIN_SUFFIX}")
+# Clang can fail to compile if CMake doesn't correctly supply the target and
+# external toolchain, but to do so, CMake needs to already know that the
+# compiler is clang. Tell CMake that the compiler is really clang, but don't
+# use CMakeForceCompiler, since we still want compile checks. We only want
+# to skip the compiler ID detection step.
+set(CMAKE_C_COMPILER_ID_RUN TRUE)
+set(CMAKE_CXX_COMPILER_ID_RUN TRUE)
+set(CMAKE_C_COMPILER_ID Clang)
+set(CMAKE_CXX_COMPILER_ID Clang)
+set(CMAKE_C_COMPILER_VERSION 9.0)
+set(CMAKE_CXX_COMPILER_VERSION 9.0)
+set(CMAKE_C_STANDARD_COMPUTED_DEFAULT 11)
+set(CMAKE_CXX_STANDARD_COMPUTED_DEFAULT 14)
+set(CMAKE_C_COMPILER_TARGET   ${ANDROID_LLVM_TRIPLE})
+set(CMAKE_C_COMPILER_FRONTEND_VARIANT "GNU")
+set(CMAKE_CXX_COMPILER_FRONTEND_VARIANT "GNU")
+set(CMAKE_CXX_COMPILER_TARGET ${ANDROID_LLVM_TRIPLE})
+set(CMAKE_ASM_COMPILER_TARGET ${ANDROID_LLVM_TRIPLE})
+set(CMAKE_C_COMPILER_EXTERNAL_TOOLCHAIN   "${ANDROID_TOOLCHAIN_ROOT}")
+set(CMAKE_CXX_COMPILER_EXTERNAL_TOOLCHAIN "${ANDROID_TOOLCHAIN_ROOT}")
+set(CMAKE_ASM_COMPILER_EXTERNAL_TOOLCHAIN "${ANDROID_TOOLCHAIN_ROOT}")
+set(ANDROID_AR "${ANDROID_TOOLCHAIN_PREFIX}ar${ANDROID_TOOLCHAIN_SUFFIX}")
+set(ANDROID_RANLIB
+  "${ANDROID_TOOLCHAIN_PREFIX}ranlib${ANDROID_TOOLCHAIN_SUFFIX}")
+
+# Generic flags.
+list(APPEND ANDROID_COMPILER_FLAGS
+  -g
+  -DANDROID
+  -fdata-sections
+  -ffunction-sections
+  -funwind-tables
+  -fstack-protector-strong
+  -no-canonical-prefixes)
+
+# https://github.com/android/ndk/issues/885
+# If we're using LLD we need to use a slower build-id algorithm to work around
+# the old version of LLDB in Android Studio, which doesn't understand LLD's
+# default hash ("fast").
+#
+# Note that because we cannot see the user's flags, we can't detect this very
+# accurately. Users that explicitly use -fuse-ld=lld instead of ANDROID_LD will
+# not be able to debug.
+if(ANDROID_LD STREQUAL lld)
+  list(APPEND ANDROID_LINKER_FLAGS -Wl,--build-id=sha1)
+else()
+  list(APPEND ANDROID_LINKER_FLAGS -Wl,--build-id)
+endif()
+
+list(APPEND ANDROID_LINKER_FLAGS -Wl,--fatal-warnings)
+list(APPEND ANDROID_LINKER_FLAGS_EXE -Wl,--gc-sections)
+
+# Debug and release flags.
+list(APPEND ANDROID_COMPILER_FLAGS_DEBUG -O0)
+if(ANDROID_ABI MATCHES "^armeabi" AND ANDROID_ARM_MODE STREQUAL thumb)
+  list(APPEND ANDROID_COMPILER_FLAGS_RELEASE -Oz)
+else()
+  list(APPEND ANDROID_COMPILER_FLAGS_RELEASE -O2)
+endif()
+list(APPEND ANDROID_COMPILER_FLAGS_RELEASE -DNDEBUG)
+if(ANDROID_TOOLCHAIN STREQUAL clang)
+  list(APPEND ANDROID_COMPILER_FLAGS_DEBUG -fno-limit-debug-info)
+endif()
+
+# Toolchain and ABI specific flags.
+if(ANDROID_ABI STREQUAL x86 AND ANDROID_PLATFORM_LEVEL LESS 24)
+  # http://b.android.com/222239
+  # http://b.android.com/220159 (internal http://b/31809417)
+  # x86 devices have stack alignment issues.
+  list(APPEND ANDROID_COMPILER_FLAGS -mstackrealign)
+endif()
+
+list(APPEND ANDROID_COMPILER_FLAGS -D_FORTIFY_SOURCE=2)
+
+# STL specific flags.
+if(ANDROID_STL MATCHES "^c\\+\\+_")
+  if(ANDROID_ABI MATCHES "^armeabi")
+    list(APPEND ANDROID_LINKER_FLAGS "-Wl,--exclude-libs,libunwind.a")
+  endif()
+endif()
+
+set(CMAKE_C_STANDARD_LIBRARIES_INIT "-latomic -lm")
+set(CMAKE_CXX_STANDARD_LIBRARIES_INIT "${CMAKE_C_STANDARD_LIBRARIES_INIT}")
+if(ANDROID_CXX_STANDARD_LIBRARIES)
+  string(REPLACE ";" "\" \"" ANDROID_CXX_STANDARD_LIBRARIES "\"${ANDROID_CXX_STANDARD_LIBRARIES}\"")
+  set(CMAKE_CXX_STANDARD_LIBRARIES_INIT "${CMAKE_CXX_STANDARD_LIBRARIES_INIT} ${ANDROID_CXX_STANDARD_LIBRARIES}")
+endif()
+
+# Configuration specific flags.
+
+# PIE is supported on all currently supported Android releases, but it is not
+# supported with static executables, so we still provide ANDROID_PIE as an
+# escape hatch for those.
+if(ANDROID_PIE)
+  set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
+endif()
+
+if(ANDROID_CPP_FEATURES)
+  separate_arguments(ANDROID_CPP_FEATURES)
+  foreach(feature ${ANDROID_CPP_FEATURES})
+    if(NOT ${feature} MATCHES "^(rtti|exceptions|no-rtti|no-exceptions)$")
+      message(FATAL_ERROR "Invalid Android C++ feature: ${feature}.")
+    endif()
+    list(APPEND ANDROID_COMPILER_FLAGS_CXX
+      -f${feature})
+  endforeach()
+  string(REPLACE ";" " " ANDROID_CPP_FEATURES "${ANDROID_CPP_FEATURES}")
+endif()
+if(NOT ANDROID_ALLOW_UNDEFINED_SYMBOLS)
+  list(APPEND ANDROID_LINKER_FLAGS
+    -Wl,--no-undefined)
+endif()
+if(ANDROID_ABI MATCHES "armeabi")
+  # Clang does not set this up properly when using -fno-integrated-as.
+  # https://github.com/android-ndk/ndk/issues/906
+  list(APPEND ANDROID_COMPILER_FLAGS "-march=armv7-a")
+  if(ANDROID_ARM_MODE STREQUAL thumb)
+    list(APPEND ANDROID_COMPILER_FLAGS -mthumb)
+  elseif(ANDROID_ARM_MODE STREQUAL arm)
+    # Default behavior.
+  else()
+    message(FATAL_ERROR "Invalid Android ARM mode: ${ANDROID_ARM_MODE}.")
+  endif()
+  if(ANDROID_ABI STREQUAL armeabi-v7a AND NOT ANDROID_ARM_NEON)
+    list(APPEND ANDROID_COMPILER_FLAGS
+      -mfpu=vfpv3-d16)
+  endif()
+endif()
+
+# CMake automatically forwards all compiler flags to the linker, and clang
+# doesn't like having -Wa flags being used for linking. To prevent CMake from
+# doing this would require meddling with the CMAKE_<LANG>_COMPILE_OBJECT rules,
+# which would get quite messy.
+list(APPEND ANDROID_LINKER_FLAGS -Qunused-arguments)
+
+if(ANDROID_DISABLE_FORMAT_STRING_CHECKS)
+  list(APPEND ANDROID_COMPILER_FLAGS
+    -Wno-error=format-security)
+else()
+  list(APPEND ANDROID_COMPILER_FLAGS
+    -Wformat -Werror=format-security)
+endif()
+
+# Convert these lists into strings.
+string(REPLACE ";" " " ANDROID_COMPILER_FLAGS         "${ANDROID_COMPILER_FLAGS}")
+string(REPLACE ";" " " ANDROID_COMPILER_FLAGS_CXX     "${ANDROID_COMPILER_FLAGS_CXX}")
+string(REPLACE ";" " " ANDROID_COMPILER_FLAGS_DEBUG   "${ANDROID_COMPILER_FLAGS_DEBUG}")
+string(REPLACE ";" " " ANDROID_COMPILER_FLAGS_RELEASE "${ANDROID_COMPILER_FLAGS_RELEASE}")
+string(REPLACE ";" " " ANDROID_LINKER_FLAGS           "${ANDROID_LINKER_FLAGS}")
+string(REPLACE ";" " " ANDROID_LINKER_FLAGS_EXE       "${ANDROID_LINKER_FLAGS_EXE}")
+
+if(ANDROID_CCACHE)
+  set(CMAKE_C_COMPILER_LAUNCHER   "${ANDROID_CCACHE}")
+  set(CMAKE_CXX_COMPILER_LAUNCHER "${ANDROID_CCACHE}")
+endif()
+set(CMAKE_C_COMPILER        "${ANDROID_C_COMPILER}")
+set(CMAKE_CXX_COMPILER      "${ANDROID_CXX_COMPILER}")
+set(CMAKE_AR                "${ANDROID_AR}" CACHE FILEPATH "Archiver")
+set(CMAKE_RANLIB            "${ANDROID_RANLIB}" CACHE FILEPATH "Ranlib")
+set(_CMAKE_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
+
+if(ANDROID_ABI STREQUAL "x86" OR ANDROID_ABI STREQUAL "x86_64")
+  set(CMAKE_ASM_NASM_COMPILER
+    "${ANDROID_TOOLCHAIN_ROOT}/bin/yasm${ANDROID_TOOLCHAIN_SUFFIX}")
+  set(CMAKE_ASM_NASM_COMPILER_ARG1 "-DELF")
+endif()
+
+# Set or retrieve the cached flags.
+# This is necessary in case the user sets/changes flags in subsequent
+# configures. If we included the Android flags in here, they would get
+# overwritten.
+set(CMAKE_C_FLAGS ""
+  CACHE STRING "Flags used by the compiler during all build types.")
+set(CMAKE_CXX_FLAGS ""
+  CACHE STRING "Flags used by the compiler during all build types.")
+set(CMAKE_ASM_FLAGS ""
+  CACHE STRING "Flags used by the compiler during all build types.")
+set(CMAKE_C_FLAGS_DEBUG ""
+  CACHE STRING "Flags used by the compiler during debug builds.")
+set(CMAKE_CXX_FLAGS_DEBUG ""
+  CACHE STRING "Flags used by the compiler during debug builds.")
+set(CMAKE_ASM_FLAGS_DEBUG ""
+  CACHE STRING "Flags used by the compiler during debug builds.")
+set(CMAKE_C_FLAGS_RELEASE ""
+  CACHE STRING "Flags used by the compiler during release builds.")
+set(CMAKE_CXX_FLAGS_RELEASE ""
+  CACHE STRING "Flags used by the compiler during release builds.")
+set(CMAKE_ASM_FLAGS_RELEASE ""
+  CACHE STRING "Flags used by the compiler during release builds.")
+set(CMAKE_MODULE_LINKER_FLAGS ""
+  CACHE STRING "Flags used by the linker during the creation of modules.")
+set(CMAKE_SHARED_LINKER_FLAGS ""
+  CACHE STRING "Flags used by the linker during the creation of dll's.")
+set(CMAKE_EXE_LINKER_FLAGS ""
+  CACHE STRING "Flags used by the linker.")
+
+set(CMAKE_C_FLAGS             "${ANDROID_COMPILER_FLAGS} ${CMAKE_C_FLAGS}")
+set(CMAKE_CXX_FLAGS           "${ANDROID_COMPILER_FLAGS} ${ANDROID_COMPILER_FLAGS_CXX} ${CMAKE_CXX_FLAGS}")
+set(CMAKE_ASM_FLAGS           "${ANDROID_COMPILER_FLAGS} ${CMAKE_ASM_FLAGS}")
+set(CMAKE_C_FLAGS_DEBUG       "${ANDROID_COMPILER_FLAGS_DEBUG} ${CMAKE_C_FLAGS_DEBUG}")
+set(CMAKE_CXX_FLAGS_DEBUG     "${ANDROID_COMPILER_FLAGS_DEBUG} ${CMAKE_CXX_FLAGS_DEBUG}")
+set(CMAKE_ASM_FLAGS_DEBUG     "${ANDROID_COMPILER_FLAGS_DEBUG} ${CMAKE_ASM_FLAGS_DEBUG}")
+set(CMAKE_C_FLAGS_RELEASE     "${ANDROID_COMPILER_FLAGS_RELEASE} ${CMAKE_C_FLAGS_RELEASE}")
+set(CMAKE_CXX_FLAGS_RELEASE   "${ANDROID_COMPILER_FLAGS_RELEASE} ${CMAKE_CXX_FLAGS_RELEASE}")
+set(CMAKE_ASM_FLAGS_RELEASE   "${ANDROID_COMPILER_FLAGS_RELEASE} ${CMAKE_ASM_FLAGS_RELEASE}")
+set(CMAKE_SHARED_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS}")
+set(CMAKE_MODULE_LINKER_FLAGS "${ANDROID_LINKER_FLAGS} ${CMAKE_MODULE_LINKER_FLAGS}")
+set(CMAKE_EXE_LINKER_FLAGS    "${ANDROID_LINKER_FLAGS} ${ANDROID_LINKER_FLAGS_EXE} ${CMAKE_EXE_LINKER_FLAGS}")
+
+# Compatibility for read-only variables.
+# Read-only variables for compatibility with the other toolchain file.
+# We'll keep these around for the existing projects that still use them.
+# TODO: All of the variables here have equivalents in our standard set of
+# configurable variables, so we can remove these once most of our users migrate
+# to those variables.
+set(ANDROID_NATIVE_API_LEVEL ${ANDROID_PLATFORM_LEVEL})
+if(ANDROID_ALLOW_UNDEFINED_SYMBOLS)
+  set(ANDROID_SO_UNDEFINED TRUE)
+else()
+  set(ANDROID_NO_UNDEFINED TRUE)
+endif()
+set(ANDROID_FUNCTION_LEVEL_LINKING TRUE)
+set(ANDROID_GOLD_LINKER TRUE)
+set(ANDROID_NOEXECSTACK TRUE)
+set(ANDROID_RELRO TRUE)
+if(ANDROID_ARM_MODE STREQUAL arm)
+  set(ANDROID_FORCE_ARM_BUILD TRUE)
+endif()
+if(ANDROID_CPP_FEATURES MATCHES "rtti"
+    AND ANDROID_CPP_FEATURES MATCHES "exceptions")
+  set(ANDROID_STL_FORCE_FEATURES TRUE)
+endif()
+if(ANDROID_CCACHE)
+  set(NDK_CCACHE "${ANDROID_CCACHE}")
+endif()
+if(ANDROID_TOOLCHAIN STREQUAL clang)
+  set(ANDROID_TOOLCHAIN_NAME ${ANDROID_TOOLCHAIN_NAME}-clang)
+else()
+  set(ANDROID_TOOLCHAIN_NAME ${ANDROID_TOOLCHAIN_NAME}-4.9)
+endif()
+set(ANDROID_NDK_HOST_X64 TRUE)
+set(ANDROID_NDK_LAYOUT RELEASE)
+if(ANDROID_ABI STREQUAL armeabi-v7a)
+  set(ARMEABI_V7A TRUE)
+  if(ANDROID_ARM_NEON)
+    set(NEON TRUE)
+  endif()
+elseif(ANDROID_ABI STREQUAL arm64-v8a)
+  set(ARM64_V8A TRUE)
+elseif(ANDROID_ABI STREQUAL x86)
+  set(X86 TRUE)
+elseif(ANDROID_ABI STREQUAL x86_64)
+  set(X86_64 TRUE)
+endif()
+set(ANDROID_NDK_HOST_SYSTEM_NAME ${ANDROID_HOST_TAG})
+set(ANDROID_NDK_ABI_NAME ${ANDROID_ABI})
+set(ANDROID_NDK_RELEASE r${ANDROID_NDK_REVISION})
+set(ANDROID_ARCH_NAME ${ANDROID_SYSROOT_ABI})
+set(TOOL_OS_SUFFIX ${ANDROID_TOOLCHAIN_SUFFIX})
+if(ANDROID_TOOLCHAIN STREQUAL clang)
+  set(ANDROID_COMPILER_IS_CLANG TRUE)
+endif()
+
+# CMake 3.7+ compatibility.
+if (CMAKE_VERSION VERSION_GREATER 3.7.0)
+  set(CMAKE_ANDROID_NDK ${ANDROID_NDK})
+  set(CMAKE_ANDROID_NDK_TOOLCHAIN_VERSION clang)
+
+  set(CMAKE_ANDROID_STL_TYPE ${ANDROID_STL})
+
+  if(ANDROID_ABI MATCHES "^armeabi(-v7a)?$")
+    set(CMAKE_ANDROID_ARM_NEON ${ANDROID_ARM_NEON})
+    set(CMAKE_ANDROID_ARM_MODE ${ANDROID_ARM_MODE})
+  endif()
+
+  # https://github.com/android/ndk/issues/861
+  if(ANDROID_ABI STREQUAL armeabi-v7a)
+    set(CMAKE_ANDROID_ARCH arm)
+  elseif(ANDROID_ABI STREQUAL arm64-v8a)
+    set(CMAKE_ANDROID_ARCH arm64)
+  elseif(ANDROID_ABI STREQUAL x86)
+    set(CMAKE_ANDROID_ARCH x86)
+  elseif(ANDROID_ABI STREQUAL x86_64)
+    set(CMAKE_ANDROID_ARCH x86_64)
+  endif()
+
+  # https://github.com/android/ndk/issues/1012
+  set(CMAKE_ASM_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
+  set(CMAKE_C_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
+  set(CMAKE_CXX_ANDROID_TOOLCHAIN_MACHINE "${ANDROID_TOOLCHAIN_NAME}")
+
+  set(CMAKE_ASM_ANDROID_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
+  set(CMAKE_C_ANDROID_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
+  set(CMAKE_CXX_ANDROID_TOOLCHAIN_PREFIX "${ANDROID_TOOLCHAIN_PREFIX}")
+
+  set(CMAKE_ASM_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
+  set(CMAKE_C_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
+  set(CMAKE_CXX_ANDROID_TOOLCHAIN_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
+endif()
diff --git a/build/cmake/exports.cmake b/build/cmake/exports.cmake
deleted file mode 100644
index e78a2bd..0000000
--- a/build/cmake/exports.cmake
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Read-only variables for compatibility with the other toolchain file.
-# We'll keep these around for the existing projects that still use them.
-# TODO: All of the variables here have equivalents in the standard set of
-# cmake configurable variables, so we can remove these once most of our
-# users migrate to those variables.
-
-# From legacy toolchain file.
-set(ANDROID_NDK "${CMAKE_ANDROID_NDK}")
-set(ANDROID_ABI "${CMAKE_ANDROID_ARCH_ABI}")
-set(ANDROID_COMPILER_IS_CLANG TRUE)
-set(ANDROID_PLATFORM "android-${CMAKE_SYSTEM_VERSION}")
-set(ANDROID_PLATFORM_LEVEL "${CMAKE_SYSTEM_VERSION}")
-set(ANDROID_ARM_NEON TRUE)
-if(CMAKE_ANDROID_ARM_MODE)
-  set(ANDROID_ARM_MODE "arm")
-  set(ANDROID_FORCE_ARM_BUILD TRUE)
-else()
-  set(ANDROID_ARM_MODE "thumb")
-endif()
-set(ANDROID_ARCH_NAME "${CMAKE_ANDROID_ARCH}")
-set(ANDROID_LLVM_TRIPLE "${CMAKE_ANDROID_ARCH_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
-set(ANDROID_TOOLCHAIN_ROOT "${CMAKE_ANDROID_NDK_TOOLCHAIN_UNIFIED}")
-set(ANDROID_HOST_TAG "${CMAKE_ANDROID_NDK_TOOLCHAIN_HOST_TAG}")
-set(ANDROID_HOST_PREBUILTS "${CMAKE_ANDROID_NDK}/prebuilt/${CMAKE_ANDROID_NDK_TOOLCHAIN_HOST_TAG}")
-set(ANDROID_AR "${CMAKE_AR}")
-set(ANDROID_RANLIB "${CMAKE_RANLIB}")
-set(ANDROID_STRIP "${CMAKE_STRIP}")
-if(CMAKE_HOST_SYSTEM_NAME STREQUAL "Windows")
-  set(ANDROID_TOOLCHAIN_SUFFIX ".exe")
-endif()
-
-# From other toolchain file.
-set(ANDROID_NATIVE_API_LEVEL "${ANDROID_PLATFORM_LEVEL}")
-if(ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  set(ANDROID_SO_UNDEFINED TRUE)
-else()
-  set(ANDROID_NO_UNDEFINED TRUE)
-endif()
-set(ANDROID_FUNCTION_LEVEL_LINKING TRUE)
-set(ANDROID_GOLD_LINKER TRUE)
-set(ANDROID_NOEXECSTACK TRUE)
-set(ANDROID_RELRO TRUE)
-if(ANDROID_CPP_FEATURES MATCHES "rtti"
-    AND ANDROID_CPP_FEATURES MATCHES "exceptions")
-  set(ANDROID_STL_FORCE_FEATURES TRUE)
-endif()
-if(ANDROID_CCACHE)
-  set(NDK_CCACHE "${ANDROID_CCACHE}")
-endif()
-set(ANDROID_NDK_HOST_X64 TRUE)
-set(ANDROID_NDK_LAYOUT RELEASE)
-if(CMAKE_ANDROID_ARCH_ABI STREQUAL "armeabi-v7a")
-  set(ARMEABI_V7A TRUE)
-  set(NEON TRUE)
-elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL "arm64-v8a")
-  set(ARM64_V8A TRUE)
-elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL "x86")
-  set(X86 TRUE)
-elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL "x86_64")
-  set(X86_64 TRUE)
-elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL "riscv64")
-  set(RISCV64 TRUE)
-endif()
-set(ANDROID_NDK_HOST_SYSTEM_NAME "${ANDROID_HOST_TAG}")
-set(ANDROID_NDK_ABI_NAME "${CMAKE_ANDROID_ARCH_ABI}")
-set(ANDROID_NDK_RELEASE "r${ANDROID_NDK_REVISION}")
-set(TOOL_OS_SUFFIX "${ANDROID_TOOLCHAIN_SUFFIX}")
diff --git a/build/cmake/flags.cmake b/build/cmake/flags.cmake
deleted file mode 100644
index f1f4eb9..0000000
--- a/build/cmake/flags.cmake
+++ /dev/null
@@ -1,119 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This file will be included directly by cmake. It is used to provide
-# additional cflags / ldflags.
-
-set(_ANDROID_NDK_INIT_CFLAGS)
-set(_ANDROID_NDK_INIT_CFLAGS_DEBUG)
-set(_ANDROID_NDK_INIT_CFLAGS_RELEASE)
-set(_ANDROID_NDK_INIT_LDFLAGS)
-set(_ANDROID_NDK_INIT_LDFLAGS_EXE)
-
-# Generic flags.
-string(APPEND _ANDROID_NDK_INIT_CFLAGS
-  " -DANDROID"
-  " -fdata-sections"
-  " -ffunction-sections"
-  " -funwind-tables"
-  " -fstack-protector-strong"
-  " -no-canonical-prefixes")
-
-if(ANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES)
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS " -D__BIONIC_NO_PAGE_SIZE_MACRO")
-  if(ANDROID_ABI STREQUAL arm64-v8a)
-    string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,-z,max-page-size=16384")
-  endif()
-endif()
-
-if(ANDROID_WEAK_API_DEFS)
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS
-    " -D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__"
-    " -Werror=unguarded-availability")
-endif()
-
-if("hwaddress" IN_LIST ANDROID_SANITIZE)
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS " -fsanitize=hwaddress -fno-omit-frame-pointer")
-  string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -fsanitize=hwaddress")
-endif()
-
-if("memtag" IN_LIST ANDROID_SANITIZE)
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS " -fsanitize=memtag-stack -fno-omit-frame-pointer")
-  string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -fsanitize=memtag-stack,memtag-heap -fsanitize-memtag-mode=sync")
-  if(CMAKE_ANDROID_ARCH_ABI STREQUAL "arm64-v8a")
-    string(APPEND _ANDROID_NDK_INIT_CFLAGS " -march=armv8-a+memtag")
-    string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -march=armv8-a+memtag")
-  endif()
-endif()
-
-string(APPEND _ANDROID_NDK_INIT_CFLAGS_DEBUG " -fno-limit-debug-info")
-
-# If we're using LLD we need to use a slower build-id algorithm to work around
-# the old version of LLDB in Android Studio, which doesn't understand LLD's
-# default hash ("fast").
-#
-# https://github.com/android/ndk/issues/885
-string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--build-id=sha1")
-
-if(CMAKE_SYSTEM_VERSION LESS 30)
-  # https://github.com/android/ndk/issues/1196
-  # https://github.com/android/ndk/issues/1589
-  string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--no-rosegment")
-endif()
-
-if (NOT ANDROID_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS)
-  string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--no-undefined-version")
-endif()
-
-string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--fatal-warnings")
-# This should only be set for release modes, but CMake doesn't provide a way for
-# us to be that specific in the new toolchain file.
-# https://github.com/android/ndk/issues/1813
-string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--gc-sections")
-string(APPEND _ANDROID_NDK_INIT_LDFLAGS_EXE " -Wl,--gc-sections")
-
-# Toolchain and ABI specific flags.
-if(CMAKE_ANDROID_ARCH_ABI STREQUAL x86 AND CMAKE_SYSTEM_VERSION LESS 24)
-  # http://b.android.com/222239
-  # http://b.android.com/220159 (internal http://b/31809417)
-  # x86 devices have stack alignment issues.
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS " -mstackrealign")
-endif()
-
-string(APPEND _ANDROID_NDK_INIT_CFLAGS " -D_FORTIFY_SOURCE=2")
-
-if(CMAKE_ANDROID_ARCH_ABI MATCHES "armeabi")
-  # Clang does not set this up properly when using -fno-integrated-as.
-  # https://github.com/android-ndk/ndk/issues/906
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS " -march=armv7-a")
-  if(NOT CMAKE_ANDROID_ARM_MODE)
-    string(APPEND _ANDROID_NDK_INIT_CFLAGS " -mthumb")
-  endif()
-endif()
-
-# CMake automatically forwards all compiler flags to the linker, and clang
-# doesn't like having -Wa flags being used for linking. To prevent CMake from
-# doing this would require meddling with the CMAKE_<LANG>_COMPILE_OBJECT rules,
-# which would get quite messy.
-string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Qunused-arguments")
-
-if(ANDROID_DISABLE_FORMAT_STRING_CHECKS)
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS " -Wno-error=format-security")
-else()
-  string(APPEND _ANDROID_NDK_INIT_CFLAGS " -Wformat -Werror=format-security")
-endif()
-
-if(NOT ANDROID_ALLOW_UNDEFINED_SYMBOLS)
-  string(APPEND _ANDROID_NDK_INIT_LDFLAGS " -Wl,--no-undefined")
-endif()
diff --git a/build/cmake/hooks/post/Android-Clang.cmake b/build/cmake/hooks/post/Android-Clang.cmake
deleted file mode 100644
index 5a3acbf..0000000
--- a/build/cmake/hooks/post/Android-Clang.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the end of
-# Modules/Platform/Android-Clang.cmake.
diff --git a/build/cmake/hooks/post/Android-Determine.cmake b/build/cmake/hooks/post/Android-Determine.cmake
deleted file mode 100644
index 8e7852f..0000000
--- a/build/cmake/hooks/post/Android-Determine.cmake
+++ /dev/null
@@ -1,22 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the end of
-# Modules/Platform/Android-Determine.cmake.
-
-# android.toolchain.cmake may set this to export old variables.
-if(_ANDROID_EXPORT_COMPATIBILITY_VARIABLES)
-  file(READ "${CMAKE_ANDROID_NDK}/build/cmake/exports.cmake" _EXPORTS)
-  string(APPEND CMAKE_SYSTEM_CUSTOM_CODE "\n${_EXPORTS}\n")
-endif()
diff --git a/build/cmake/hooks/post/Android-Initialize.cmake b/build/cmake/hooks/post/Android-Initialize.cmake
deleted file mode 100644
index 6e7aafb..0000000
--- a/build/cmake/hooks/post/Android-Initialize.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the end of
-# Modules/Platform/Android-Initialize.cmake.
diff --git a/build/cmake/hooks/post/Determine-Compiler.cmake b/build/cmake/hooks/post/Determine-Compiler.cmake
deleted file mode 100644
index 4bab2d4..0000000
--- a/build/cmake/hooks/post/Determine-Compiler.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the end of
-# Modules/Platform/Android/Determine-Compiler.cmake.
diff --git a/build/cmake/hooks/pre/Android-Determine.cmake b/build/cmake/hooks/pre/Android-Determine.cmake
deleted file mode 100644
index 1261981..0000000
--- a/build/cmake/hooks/pre/Android-Determine.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the beginning of
-# Modules/Platform/Android-Determine.cmake.
diff --git a/build/cmake/hooks/pre/Android-Initialize.cmake b/build/cmake/hooks/pre/Android-Initialize.cmake
deleted file mode 100644
index 8608129..0000000
--- a/build/cmake/hooks/pre/Android-Initialize.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the beginning of
-# Modules/Platform/Android-Initialize.cmake.
diff --git a/build/cmake/hooks/pre/Android.cmake b/build/cmake/hooks/pre/Android.cmake
deleted file mode 100644
index 8813c94..0000000
--- a/build/cmake/hooks/pre/Android.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the beginning of
-# Modules/Platform/Android.cmake.
diff --git a/build/cmake/hooks/pre/Determine-Compiler.cmake b/build/cmake/hooks/pre/Determine-Compiler.cmake
deleted file mode 100644
index 79cc6c0..0000000
--- a/build/cmake/hooks/pre/Determine-Compiler.cmake
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This is a hook file that will be included by cmake at the beginning of
-# Modules/Platform/Android/Determine-Compiler.cmake.
-
-# Skip hook for the legacy toolchain workflow.
-if(CMAKE_SYSTEM_VERSION EQUAL 1)
-  return()
-endif()
-
-if(${CMAKE_VERSION} VERSION_LESS "3.22.0")
-  # If we don't explicitly set the target CMake will ID the compiler using the
-  # default target, causing MINGW to be defined when a Windows host is used.
-  # https://github.com/android/ndk/issues/1581
-  # https://gitlab.kitware.com/cmake/cmake/-/issues/22647
-  if(CMAKE_ANDROID_ARCH_ABI STREQUAL armeabi-v7a)
-    set(ANDROID_LLVM_TRIPLE armv7-none-linux-androideabi)
-  elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL arm64-v8a)
-    set(ANDROID_LLVM_TRIPLE aarch64-none-linux-android)
-  elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL x86)
-    set(ANDROID_LLVM_TRIPLE i686-none-linux-android)
-  elseif(CMAKE_ANDROID_ARCH_ABI STREQUAL x86_64)
-    set(ANDROID_LLVM_TRIPLE x86_64-none-linux-android)
-  else()
-    message(FATAL_ERROR "Invalid Android ABI: ${ANDROID_ABI}.")
-  endif()
-  set(CMAKE_ASM_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
-  set(CMAKE_C_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
-  set(CMAKE_CXX_COMPILER_TARGET "${ANDROID_LLVM_TRIPLE}${CMAKE_SYSTEM_VERSION}")
-endif()
diff --git a/build/core/add-application.mk b/build/core/add-application.mk
index 31294ee..1e57e42 100644
--- a/build/core/add-application.mk
+++ b/build/core/add-application.mk
@@ -163,6 +163,7 @@
 APP_CONLYFLAGS := $(strip $(APP_CONLYFLAGS))
 APP_CPPFLAGS := $(strip $(APP_CPPFLAGS))
 APP_CXXFLAGS := $(strip $(APP_CXXFLAGS))
+APP_RENDERSCRIPT_FLAGS := $(strip $(APP_RENDERSCRIPT_FLAGS))
 APP_ASFLAGS := $(strip $(APP_ASFLAGS))
 APP_ASMFLAGS := $(strip $(APP_ASMFLAGS))
 APP_LDFLAGS  := $(strip $(APP_LDFLAGS))
diff --git a/build/core/add-platform.mk b/build/core/add-platform.mk
new file mode 100644
index 0000000..da83717
--- /dev/null
+++ b/build/core/add-platform.mk
@@ -0,0 +1,33 @@
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+$(call assert-defined,_platform NDK_PLATFORMS_ROOT)
+
+# For each platform, determine the corresponding supported ABIs
+# And record them in NDK_PLATFORM_$(platform)_ABIS
+#
+_abis := $(strip $(notdir $(wildcard $(NDK_PLATFORMS_ROOT)/$(_platform)/arch-*)))
+_abis := $(_abis:arch-%=%)
+
+$(call ndk_log,PLATFORM $(_platform) supports: $(_abis))
+
+NDK_PLATFORM_$(_platform)_ABIS    := $(_abis)
+
+# Record the sysroots for each supported ABI
+#
+$(foreach _abi,$(_abis),\
+  $(eval NDK_PLATFORM_$(_platform)_$(_abi)_SYSROOT := $(NDK_PLATFORMS_ROOT)/$(_platform)/arch-$(_abi))\
+  $(call ndk_log,  ABI $(_abi) sysroot is: $(NDK_PLATFORM_$(_platform)_$(_abi)_SYSROOT))\
+)
diff --git a/build/core/build-all.mk b/build/core/build-all.mk
index 630c490..cfaec1d 100644
--- a/build/core/build-all.mk
+++ b/build/core/build-all.mk
@@ -54,6 +54,16 @@
 PREBUILT_SHARED_LIBRARY   := $(BUILD_SYSTEM)/prebuilt-shared-library.mk
 PREBUILT_STATIC_LIBRARY   := $(BUILD_SYSTEM)/prebuilt-static-library.mk
 
+ANDROID_MK_INCLUDED := \
+  $(CLEAR_VARS) \
+  $(BUILD_HOST_EXECUTABLE) \
+  $(BUILD_HOST_STATIC_LIBRARY) \
+  $(BUILD_STATIC_LIBRARY) \
+  $(BUILD_SHARED_LIBRARY) \
+  $(BUILD_EXECUTABLE) \
+  $(PREBUILT_SHARED_LIBRARY) \
+
+
 # this is the list of directories containing dependency information
 # generated during the build. It will be updated by build scripts
 # when module definitions are parsed.
diff --git a/build/core/build-binary.mk b/build/core/build-binary.mk
index a09ef5c..21c79cd 100644
--- a/build/core/build-binary.mk
+++ b/build/core/build-binary.mk
@@ -27,6 +27,7 @@
 LOCAL_CPPFLAGS := $(__ndk_modules.$(LOCAL_MODULE).CPPFLAGS)
 LOCAL_CXXFLAGS := $(__ndk_modules.$(LOCAL_MODULE).CXXFLAGS)
 LOCAL_LDFLAGS := $(__ndk_modules.$(LOCAL_MODULE).LDFLAGS)
+LOCAL_RENDERSCRIPT_FLAGS := $(__ndk_modules.$(LOCAL_MODULE).RENDERSCRIPT_FLAGS)
 
 # For now, only support target (device-specific modules).
 # We may want to introduce support for host modules in the future
@@ -125,20 +126,20 @@
 	$(call host-echo-build-step,$(PRIVATE_ABI),Clean) "$(PRIVATE_MODULE) [$(PRIVATE_ABI)]"
 	$(hide) $(call host-rmdir,$(PRIVATE_CLEAN_FILES))
 
+ifeq ($(NDK_APP_DEBUGGABLE),true)
+$(NDK_APP_GDBSETUP): PRIVATE_SRC_DIRS += $(LOCAL_C_INCLUDES) $(LOCAL_PATH)
+endif
+
 # list of generated object files
 LOCAL_OBJECTS :=
 
+# list of generated object files from RS files, subset of LOCAL_OBJECTS
+LOCAL_RS_OBJECTS :=
+
 # always define ANDROID when building binaries
 #
 LOCAL_CFLAGS := -DANDROID $(LOCAL_CFLAGS)
 
-ifeq ($(APP_SUPPORT_FLEXIBLE_PAGE_SIZES),true)
-  LOCAL_CFLAGS += -D__BIONIC_NO_PAGE_SIZE_MACRO
-  ifeq ($(APP_ABI),arm64-v8a)
-    LOCAL_LDFLAGS += -Wl,-z,max-page-size=16384
-  endif
-endif
-
 #
 # Add the default system shared libraries to the build
 #
@@ -161,8 +162,22 @@
   # Match the default GCC C++ extensions.
   LOCAL_CPP_EXTENSION := $(default-c++-extensions)
 endif
+LOCAL_RS_EXTENSION := $(default-rs-extensions)
 
-include $(BUILD_SYSTEM)/stl.mk
+ifneq ($(NDK_APP_STL),system)
+    LOCAL_CFLAGS += -nostdinc++
+    LOCAL_LDFLAGS += -nostdlib++
+else
+    # TODO: Remove when https://reviews.llvm.org/D55856 is merged.
+    #
+    # The system STL Android.mk will export -lstdc++, but the Clang driver will
+    # helpfully rewrite -lstdc++ to whatever the default C++ stdlib linker
+    # arguments are, except in the presence of -nostdlib and -nodefaultlibs.
+    # That part of the driver does not account for -nostdlib++. We can fix the
+    # behavior by using -stdlib=libstdc++ so it rewrites -lstdc++ to -lstdc++
+    # instead of -lc++.
+    LOCAL_LDFLAGS += -stdlib=libstdc++
+endif
 
 #
 # If LOCAL_ALLOW_UNDEFINED_SYMBOLS is not true, the linker will allow the generation
@@ -186,14 +201,6 @@
   LOCAL_CFLAGS += $(TARGET_FORMAT_STRING_CFLAGS)
 endif
 
-# Enable branch protection for arm64-v8a
-LOCAL_BRANCH_PROTECTION := $(strip $(LOCAL_BRANCH_PROTECTION))
-ifdef LOCAL_BRANCH_PROTECTION
-    ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
-        LOCAL_CFLAGS += -mbranch-protection=$(LOCAL_BRANCH_PROTECTION)
-    endif
-endif
-
 # http://b.android.com/222239
 # http://b.android.com/220159 (internal http://b/31809417)
 # x86 devices have stack alignment issues.
@@ -203,10 +210,6 @@
     endif
 endif
 
-ifneq ($(LOCAL_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS),true)
-    LOCAL_LDFLAGS += -Wl,--no-undefined-version
-endif
-
 #
 # The original Android build system allows you to use the .arm prefix
 # to a source file name to indicate that it should be defined in either
@@ -243,18 +246,30 @@
 
 $(call clear-all-src-tags)
 
-# Historically the NDK supported both Neon and non-Neon as variants of the
-# armeabi-v7a ABI. These were practically two ABIs but the distinction was not
-# official (APKs did not have separate libraries for Neon and non-Neon devices).
-# As of NDK r24 non-Neon devices are no longer supported, so any options opting
-# *in* to Neon are ignored, and options explicitly opting out of Neon are an
-# error. Users that choose a non-Neon -mfpu in their CFLAGS will receive no
-# diagnostic.
+# As a special extension, the NDK also supports the .neon extension suffix
+# to indicate that a single file can be compiled with ARM NEON support
+# We must support both foo.c.neon and foo.c.arm.neon here
+#
+# Also, if LOCAL_ARM_NEON is set to 'true', force Neon mode for all source
+# files
+#
 
 LOCAL_ARM_NEON := $(strip $(LOCAL_ARM_NEON))
+ifdef LOCAL_ARM_NEON
+  $(if $(filter-out true false,$(LOCAL_ARM_NEON)),\
+    $(call __ndk_info,LOCAL_ARM_NEON must be defined either to 'true' or 'false' in $(LOCAL_MAKEFILE), not '$(LOCAL_ARM_NEON)')\
+    $(call __ndk_error,Aborting) \
+  )
+endif
 
 ifeq ($(LOCAL_ARM_NEON),false)
-    $(call __ndk_error,Building non-Neon code is no longer supported.)
+  no_neon_sources := $(filter-out %.neon,$(LOCAL_SRC_FILES))
+  no_neon_sources := $(strip $(no_neon_sources))
+  $(call tag-src-files,$(no_neon_sources:%.arm=%),no_neon)
+  # tag the precompiled header with 'neon' tag if it exists
+  ifneq (,$(LOCAL_PCH))
+    $(call tag-src-files,$(LOCAL_PCH),no_neon)
+  endif
 endif
 
 LOCAL_SRC_FILES := $(LOCAL_SRC_FILES:%.neon=%)
@@ -312,12 +327,13 @@
 # all_source_patterns contains the list of filename patterns that correspond
 # to source files recognized by our build system
 ifneq ($(filter x86 x86_64, $(TARGET_ARCH_ABI)),)
-all_source_extensions := .c .s .S .asm $(LOCAL_CPP_EXTENSION)
+all_source_extensions := .c .s .S .asm $(LOCAL_CPP_EXTENSION) $(LOCAL_RS_EXTENSION)
 else
-all_source_extensions := .c .s .S $(LOCAL_CPP_EXTENSION)
+all_source_extensions := .c .s .S $(LOCAL_CPP_EXTENSION) $(LOCAL_RS_EXTENSION)
 endif
 all_source_patterns   := $(foreach _ext,$(all_source_extensions),%$(_ext))
 all_cpp_patterns      := $(foreach _ext,$(LOCAL_CPP_EXTENSION),%$(_ext))
+all_rs_patterns       := $(foreach _ext,$(LOCAL_RS_EXTENSION),%$(_ext))
 
 unknown_sources := $(strip $(filter-out $(all_source_patterns),$(LOCAL_SRC_FILES)))
 ifdef unknown_sources
@@ -337,6 +353,15 @@
 LOCAL_OBJECTS := $(subst :,_,$(LOCAL_OBJECTS))
 LOCAL_OBJECTS := $(foreach _obj,$(LOCAL_OBJECTS),$(LOCAL_OBJS_DIR)/$(_obj))
 
+LOCAL_RS_OBJECTS := $(filter $(all_rs_patterns),$(LOCAL_SRC_FILES))
+$(foreach _ext,$(LOCAL_RS_EXTENSION),\
+    $(eval LOCAL_RS_OBJECTS := $$(LOCAL_RS_OBJECTS:%$(_ext)=%$$(TARGET_OBJ_EXTENSION)))\
+)
+LOCAL_RS_OBJECTS := $(filter %$(TARGET_OBJ_EXTENSION),$(LOCAL_RS_OBJECTS))
+LOCAL_RS_OBJECTS := $(subst ../,__/,$(LOCAL_RS_OBJECTS))
+LOCAL_RS_OBJECTS := $(subst :,_,$(LOCAL_RS_OBJECTS))
+LOCAL_RS_OBJECTS := $(foreach _obj,$(LOCAL_RS_OBJECTS),$(LOCAL_OBJS_DIR)/$(_obj))
+
 # If the module has any kind of C++ features, enable them in LOCAL_CPPFLAGS
 #
 ifneq (,$(call module-has-c++-features,$(LOCAL_MODULE),rtti))
@@ -346,6 +371,23 @@
     LOCAL_CPPFLAGS += -fexceptions
 endif
 
+# Set include patch for renderscript
+ifneq ($(LOCAL_RENDERSCRIPT_INCLUDES_OVERRIDE),)
+    LOCAL_RENDERSCRIPT_INCLUDES := $(LOCAL_RENDERSCRIPT_INCLUDES_OVERRIDE)
+else
+    LOCAL_RENDERSCRIPT_INCLUDES := \
+        $(RENDERSCRIPT_PLATFORM_HEADER)/scriptc \
+        $(RENDERSCRIPT_TOOLCHAIN_HEADER) \
+        $(LOCAL_RENDERSCRIPT_INCLUDES)
+endif
+
+# Only enable the compatibility path when LOCAL_RENDERSCRIPT_COMPATIBILITY is defined.
+RS_COMPAT :=
+ifeq ($(LOCAL_RENDERSCRIPT_COMPATIBILITY),true)
+    RS_COMPAT := true
+endif
+
+
 # Build PCH
 
 get-pch-name = $(strip \
@@ -366,9 +408,10 @@
     # Build PCH
     $(call compile-cpp-source,$(LOCAL_PCH),$(LOCAL_BUILT_PCH).gch)
 
-    # The PCH must be compiled the same way as the sources (thumb vs arm must
-    # match). This means that we'd have to generate a PCH for both foo.c and
-    # foo.c.arm.
+    # The PCH must be compiled the same way as the sources (thumb vs arm, neon
+    # vs non-neon must match). This means that we'd have to generate a PCH for
+    # each combination of foo.c.arm and foo.c.neon (do we allow
+    # foo.c.arm.neon?).
     #
     # Since files with those source tags should be the minority, precompiling
     # that header might be a net loss compared to just using it normally. As
@@ -377,6 +420,11 @@
     # See https://github.com/android-ndk/ndk/issues/14
     TAGS_TO_FILTER :=
 
+    # If neon is off, strip out .neon files.
+    ifneq (true,$(LOCAL_ARM_NEON))
+        TAGS_TO_FILTER += neon
+    endif
+
     # If we're building thumb, strip out .arm files.
     ifneq (arm,$(LOCAL_ARM_MODE))
         TAGS_TO_FILTER += arm
@@ -403,6 +451,14 @@
 # Build the sources to object files
 #
 
+# Include RenderScript headers if rs files are found.
+ifneq ($(filter $(all_rs_patterns),$(LOCAL_SRC_FILES)),)
+    LOCAL_C_INCLUDES += \
+        $(RENDERSCRIPT_PLATFORM_HEADER) \
+        $(RENDERSCRIPT_PLATFORM_HEADER)/cpp \
+        $(TARGET_OBJS)/$(LOCAL_MODULE)
+endif
+
 do_tidy := $(NDK_APP_CLANG_TIDY)
 ifdef LOCAL_CLANG_TIDY
     do_tidy := $(LOCAL_CLANG_TIDY)
@@ -421,6 +477,10 @@
     $(call compile-cpp-source,$(src),$(call get-object-name,$(src)))\
 )
 
+$(foreach src,$(filter $(all_rs_patterns),$(LOCAL_SRC_FILES)),\
+    $(call compile-rs-source,$(src),$(call get-rs-scriptc-name,$(src)),$(call get-rs-bc-name,$(src)),$(call get-rs-so-name,$(src)),$(call get-object-name,$(src)),$(RS_COMPAT))\
+)
+
 ifneq ($(filter x86 x86_64, $(TARGET_ARCH_ABI)),)
 $(foreach src,$(filter %.asm,$(LOCAL_SRC_FILES)), $(call compile-asm-source,$(src),$(call get-object-name,$(src))))
 endif
@@ -435,32 +495,38 @@
 # Handle the static and shared libraries this module depends on
 #
 
+linker_ldflags :=
+using_lld := false
+ifeq ($(APP_LD),lld)
+    linker_ldflags := -fuse-ld=lld
+    using_lld := true
+endif
+
+combined_ldflags := $(TARGET_LDFLAGS) $(NDK_APP_LDFLAGS) $(LOCAL_LDFLAGS)
+ndk_fuse_ld_flags := $(filter -fuse-ld=%,$(combined_ldflags))
+ndk_used_linker := $(lastword $(ndk_fuse_ld_flags))
+ifeq ($(ndk_used_linker),-fuse-ld=lld)
+    using_lld := true
+else
+    # In case the user has set APP_LD=lld but also disabled it for a specific
+    # module.
+    ifneq ($(ndk_used_linker),)
+        using_lld := false
+    endif
+endif
+
 # https://github.com/android/ndk/issues/885
 # If we're using LLD we need to use a slower build-id algorithm to work around
 # the old version of LLDB in Android Studio, which doesn't understand LLD's
 # default hash ("fast").
-linker_ldflags := -Wl,--build-id=sha1
-
-ifneq (,$(call lt,$(APP_PLATFORM_LEVEL),30))
-    # https://github.com/android/ndk/issues/1196
-    # https://github.com/android/ndk/issues/1589
-    linker_ldflags += -Wl,--no-rosegment
+ifeq ($(using_lld),true)
+    linker_ldflags += -Wl,--build-id=tree
+else
+    linker_ldflags += -Wl,--build-id
 endif
 
 my_ldflags := $(TARGET_LDFLAGS) $(linker_ldflags) $(NDK_APP_LDFLAGS) $(LOCAL_LDFLAGS)
 
-# https://github.com/android/ndk/issues/1390
-# Only a warning rather than an error because the API level cannot be configured
-# on a per-module basis. If the user has an APP_PLATFORM that happens to be able
-# to build the static executables there's no need to fail the build.
-ifneq (,$(filter -static,$(my_ldflags)))
-    ifneq ($(APP_PLATFORM),$(NDK_MAX_PLATFORM))
-        $(call __ndk_info,WARNING: Building static executable but APP_PLATFORM \
-            $(APP_PLATFORM) is not the latest API level $(NDK_MAX_PLATFORM). \
-            Build may not succeed.)
-    endif
-endif
-
 # When LOCAL_SHORT_COMMANDS is defined to 'true' we are going to write the
 # list of all object files and/or static/shared libraries that appear on the
 # command line to a file, then use the @<listfile> syntax to invoke it.
@@ -476,6 +542,7 @@
 $(call generate-file-dir,$(LOCAL_BUILT_MODULE))
 
 $(LOCAL_BUILT_MODULE): PRIVATE_OBJECTS := $(LOCAL_OBJECTS)
+$(LOCAL_BUILT_MODULE): PRIVATE_LIBGCC := $(TARGET_LIBGCC)
 $(LOCAL_BUILT_MODULE): PRIVATE_LIBATOMIC := $(TARGET_LIBATOMIC)
 
 $(LOCAL_BUILT_MODULE): PRIVATE_LD := $(TARGET_LD)
@@ -487,12 +554,6 @@
 $(LOCAL_BUILT_MODULE): PRIVATE_CC := $(TARGET_CC)
 $(LOCAL_BUILT_MODULE): PRIVATE_SYSROOT_API_LIB_DIR := $(SYSROOT_API_LIB_DIR)
 
-ifeq (,$(call module_needs_clangxx,$(LOCAL_MODULE)))
-$(LOCAL_BUILT_MODULE): PRIVATE_LD_DRIVER := $(TARGET_CC)
-else
-$(LOCAL_BUILT_MODULE): PRIVATE_LD_DRIVER := $(TARGET_CXX)
-endif
-
 ifeq ($(call module-get-class,$(LOCAL_MODULE)),STATIC_LIBRARY)
 
 #
@@ -616,7 +677,8 @@
 $(call -ndk-mod-debug,.  static_libs='$(static_libs)')
 $(call -ndk-mod-debug,.  whole_static_libs='$(whole_static_libs)')
 
-shared_libs       := $(call map,module-get-built,$(shared_libs))
+shared_libs       := $(call map,module-get-built,$(shared_libs))\
+                     $(TARGET_PREBUILT_SHARED_LIBRARIES)
 static_libs       := $(call map,module-get-built,$(static_libs))
 whole_static_libs := $(call map,module-get-built,$(whole_static_libs))
 
@@ -626,6 +688,8 @@
 
 # The list of object/static/shared libraries passed to the linker when
 # building shared libraries and executables. order is important.
+#
+# Cannot use immediate evaluation because PRIVATE_LIBGCC may not be defined at this point.
 linker_objects_and_libraries = $(strip $(call TARGET-get-linker-objects-and-libraries,\
     $(LOCAL_OBJECTS), \
     $(static_libs), \
diff --git a/build/core/build-module.mk b/build/core/build-module.mk
index 323c05c..68ee528 100644
--- a/build/core/build-module.mk
+++ b/build/core/build-module.mk
@@ -44,3 +44,4 @@
 __ndk_modules.$(LOCAL_MODULE).CPPFLAGS := $(LOCAL_CPPFLAGS)
 __ndk_modules.$(LOCAL_MODULE).CXXFLAGS := $(LOCAL_CXXFLAGS)
 __ndk_modules.$(LOCAL_MODULE).LDFLAGS := $(LOCAL_LDFLAGS)
+__ndk_modules.$(LOCAL_MODULE).RENDERSCRIPT_FLAGS := $(LOCAL_RENDERSCRIPT_FLAGS)
diff --git a/build/core/default-build-commands.mk b/build/core/default-build-commands.mk
index 59c0592..138707b 100644
--- a/build/core/default-build-commands.mk
+++ b/build/core/default-build-commands.mk
@@ -31,6 +31,7 @@
     $(call host-path, $1) \
     $(call link-whole-archives,$3) \
     $(call host-path, $2) \
+    $(PRIVATE_LIBGCC) \
     $(PRIVATE_LIBATOMIC) \
     $(call host-path, $4) \
 
@@ -41,8 +42,17 @@
 # This flag disables the above security checks
 TARGET_DISABLE_FORMAT_STRING_CFLAGS := -Wno-error=format-security
 
+# NOTE: Ensure that TARGET_LIBGCC is placed after all private objects
+#       and static libraries, but before any other library in the link
+#       command line when generating shared libraries and executables.
+#
+#       This ensures that all libgcc.a functions required by the target
+#       will be included into it, instead of relying on what's available
+#       on other libraries like libc.so, which may change between system
+#       releases due to toolchain or library changes.
+#
 define cmd-build-shared-library
-$(PRIVATE_LD_DRIVER) \
+$(PRIVATE_CXX) \
     -Wl,-soname,$(notdir $(LOCAL_BUILT_MODULE)) \
     -shared \
     $(PRIVATE_LINKER_OBJECTS_AND_LIBRARIES) \
@@ -57,7 +67,8 @@
 # directly needed. ld.gold (default for all other architectures) doesn't emulate
 # this buggy behavior.
 define cmd-build-executable
-$(PRIVATE_LD_DRIVER) \
+$(PRIVATE_CXX) \
+    -Wl,--gc-sections \
     -Wl,-rpath-link=$(call host-path,$(PRIVATE_SYSROOT_API_LIB_DIR)) \
     -Wl,-rpath-link=$(call host-path,$(TARGET_OUT)) \
     $(PRIVATE_LINKER_OBJECTS_AND_LIBRARIES) \
@@ -73,15 +84,11 @@
 
 cmd-strip = $(PRIVATE_STRIP) $(PRIVATE_STRIP_MODE) $(call host-path,$1)
 
-# arm32 currently uses a linker script in place of libgcc to ensure that
-# libunwind is linked in the correct order. --exclude-libs does not propagate to
-# the contents of the linker script and can't be specified within the linker
-# script. Hide both regardless of architecture to future-proof us in case we
-# move other architectures to a linker script (which we may want to do so we
-# automatically link libclangrt on other architectures).
-TARGET_LIBATOMIC = -latomic
+TARGET_LIBGCC = -lgcc -Wl,--exclude-libs,libgcc_real.a
+TARGET_LIBATOMIC = -latomic -Wl,--exclude-libs,libatomic.a
 TARGET_LDLIBS := -lc -lm
 
+TOOLCHAIN_ROOT := $(NDK_ROOT)/toolchains/llvm/prebuilt/$(HOST_TAG64)
 LLVM_TOOLCHAIN_PREFIX := $(TOOLCHAIN_ROOT)/bin/
 
 # IMPORTANT: The following definitions must use lazy assignment because
@@ -89,8 +96,11 @@
 # the toolchain's setup.mk script.
 TOOLCHAIN_PREFIX = $(TOOLCHAIN_ROOT)/bin/$(TOOLCHAIN_NAME)-
 
-TARGET_CC = $(LLVM_TOOLCHAIN_PREFIX)clang$(HOST_EXEEXT)
-TARGET_CXX = $(LLVM_TOOLCHAIN_PREFIX)clang++$(HOST_EXEEXT)
+ifneq ($(findstring ccc-analyzer,$(CC)),)
+    TARGET_CC = $(CC)
+else
+    TARGET_CC = $(LLVM_TOOLCHAIN_PREFIX)clang$(HOST_EXEEXT)
+endif
 
 CLANG_TIDY = $(LLVM_TOOLCHAIN_PREFIX)clang-tidy$(HOST_EXEEXT)
 
@@ -117,38 +127,41 @@
 
 GLOBAL_CFLAGS += -D_FORTIFY_SOURCE=2
 
-
-ifeq ($(APP_WEAK_API_DEFS), true)
-  GLOBAL_CFLAGS += \
-      -D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__ \
-      -Werror=unguarded-availability \
-
-endif
-
 GLOBAL_LDFLAGS = \
     -target $(LLVM_TRIPLE)$(TARGET_PLATFORM_LEVEL) \
     -no-canonical-prefixes \
 
-ifeq ($(APP_OPTIM),release)
-    GLOBAL_LDFLAGS += -Wl,--gc-sections
-endif
-
 GLOBAL_CXXFLAGS = $(GLOBAL_CFLAGS) -fno-exceptions -fno-rtti
 
 TARGET_CFLAGS =
 TARGET_CONLYFLAGS =
 TARGET_CXXFLAGS = $(TARGET_CFLAGS)
 
+ifneq ($(findstring c++-analyzer,$(CXX)),)
+    TARGET_CXX = $(CXX)
+else
+    TARGET_CXX = $(LLVM_TOOLCHAIN_PREFIX)clang++$(HOST_EXEEXT)
+endif
+
+TARGET_RS_CC    = $(RENDERSCRIPT_TOOLCHAIN_PREFIX)llvm-rs-cc
+TARGET_RS_BCC   = $(RENDERSCRIPT_TOOLCHAIN_PREFIX)bcc_compat
+TARGET_RS_FLAGS = -Wall -Werror
+ifeq (,$(findstring 64,$(TARGET_ARCH_ABI)))
+TARGET_RS_FLAGS += -m32
+else
+TARGET_RS_FLAGS += -m64
+endif
+
 TARGET_ASM      = $(TOOLCHAIN_ROOT)/bin/yasm
 TARGET_ASMFLAGS =
 
-TARGET_LD       = $(TOOLCHAIN_ROOT)/bin/ld
+TARGET_LD       = $(TOOLCHAIN_PREFIX)ld
 TARGET_LDFLAGS :=
 
-TARGET_AR = $(LLVM_TOOLCHAIN_PREFIX)llvm-ar$(HOST_EXEEXT)
+TARGET_AR = $(TOOLCHAIN_PREFIX)ar
 TARGET_ARFLAGS := crsD
 
-TARGET_STRIP = $(LLVM_TOOLCHAIN_PREFIX)llvm-strip$(HOST_EXEEXT)
+TARGET_STRIP    = $(TOOLCHAIN_PREFIX)strip
 
 TARGET_OBJ_EXTENSION := .o
 TARGET_LIB_EXTENSION := .a
diff --git a/build/core/define-missing-prebuilt.mk b/build/core/define-missing-prebuilt.mk
deleted file mode 100644
index 30adbc4..0000000
--- a/build/core/define-missing-prebuilt.mk
+++ /dev/null
@@ -1,27 +0,0 @@
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# https://github.com/android/ndk/issues/1559
-#
-# When LOCAL_ALLOW_MISSING_PREBUILT is true we avoid checking for missing
-# prebuilt libraries early and instead let the copy rule fail. This leads to a
-# worse diagnostic but supports the use case where AGP runs `ndk-build -n` to
-# get the build commands during sync time and the "pre" built library is
-# actually built by another module that hasn't been built yet.
-#
-# This phony target is only generated when the library actually is missing (see
-# the callsite in prebuilt-library.mk).
-.PHONY: $(prebuilt)
diff --git a/build/core/definitions.mk b/build/core/definitions.mk
index 7b8c17e..71dcd23 100644
--- a/build/core/definitions.mk
+++ b/build/core/definitions.mk
@@ -39,9 +39,7 @@
 # Returns  : the name of the last parsed Android.mk file
 # Usage    : $(local-makefile)
 # -----------------------------------------------------------------------------
-_last_android_mk = $(lastword $(filter %Android.mk,$(MAKEFILE_LIST)))
-_last_non_ndk_makefile = $(lastword $(filter-out $(NDK_ROOT)%,$(MAKEFILE_LIST)))
-local-makefile = $(if $(_last_android_mk),$(_last_android_mk),$(_last_non_ndk_makefile))
+local-makefile = $(lastword $(filter %Android.mk,$(MAKEFILE_LIST)))
 
 # -----------------------------------------------------------------------------
 # Function : assert-defined
@@ -76,6 +74,9 @@
 # The list of default C++ extensions supported by GCC.
 default-c++-extensions := .cc .cp .cxx .cpp .CPP .c++ .C
 
+# The list of default RS extensions supported by llvm-rs-cc
+default-rs-extensions := .rs .fs
+
 # -----------------------------------------------------------------------------
 # Function : generate-empty-file
 # Arguments: 1: file path
@@ -228,9 +229,7 @@
 # These are documented by docs/ANDROID-MK.TXT. Exception is LOCAL_MODULE
 #
 modules-LOCALS := \
-    ALLOW_MISSING_PREBUILT \
     ALLOW_UNDEFINED_SYMBOLS \
-    ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS \
     ARM_MODE \
     ARM_NEON \
     ASFLAGS \
@@ -263,6 +262,10 @@
     MODULE_FILENAME \
     PATH \
     PCH \
+    RENDERSCRIPT_FLAGS \
+    RENDERSCRIPT_INCLUDES \
+    RENDERSCRIPT_INCLUDES_OVERRIDE \
+    RENDERSCRIPT_TARGET_API \
     SHARED_LIBRARIES \
     SHORT_COMMANDS \
     SRC_FILES \
@@ -690,10 +693,35 @@
     $(eval __extensions := $(call module-get-c++-extensions,$1))\
     $(filter $(foreach __extension,$(__extensions),%$(__extension)),$(__files))
 
-# Returns a non-empty string if a module has C++ sources
+# Returns true if a module has C++ sources
+#
 module-has-c++-sources = $(strip $(call module-get-c++-sources,$1) \
                                  $(filter true,$(__ndk_modules.$1.HAS_CPP)))
 
+
+# Add C++ dependencies to any module that has C++ sources.
+# $1: list of C++ runtime static libraries (if any)
+# $2: list of C++ runtime shared libraries (if any)
+# $3: list of C++ runtime ldlibs (if any)
+modules-add-c++-dependencies = \
+    $(foreach __module,$(__ndk_modules),\
+        $(if $(call module-has-c++-sources,$(__module)),\
+            $(call ndk_log,Module '$(__module)' has C++ sources)\
+            $(call module-add-c++-deps,$(__module),$1,$2,$3),\
+        )\
+        $(if $(call module-has-c++-features,$(__module),rtti exceptions),\
+            $(if $(filter system,$(NDK_APP_STL)),\
+                $(call ndk_log,Module '$(__module)' uses C++ features and the system STL)\
+                $(call import-module,cxx-stl/llvm-libc++)\
+                $(call import-module,cxx-stl/llvm-libc++abi)\
+                $(call module-add-c++-deps,$(__module),c++abi)\
+                $(if $(filter true,$(NDK_PLATFORM_NEEDS_ANDROID_SUPPORT)),\
+                    $(call module-add-c++-deps,$(__module),android_support))\
+                $(if $(filter armeabi-v7a,$(TARGET_ARCH_ABI)),\
+                    $(call module-add-c++-deps,$(__module),unwind,,-ldl))))\
+    )
+
+
 # Return the compiler flags used to compile a C++ module
 # Order matters and should match the one used by the build command
 module-get-c++-flags = $(strip \
@@ -765,20 +793,21 @@
     $(if $(filter $2,$(__cxxflags)),true,)\
     )
 
-# Returns a non-empty string if the module should be linked with clang++ rather
-# than clang.
+# Add standard C++ dependencies to a given module
 #
-# A module should use clang++ iff it has C++ sources itself or if it depends on
-# a static library with C++ sources. We do not need to use clang++ for shared
-# library dependencies.
-module_needs_clangxx = $(strip \
-  $(call module-has-c++-sources,$1)\
-  $(foreach __dep,$(call module-get-all-dependencies,$1),\
-    $(if $(call module-is-static-library,$(__dep)),\
-      $(call module-has-c++-sources,$(__dep))\
-    )\
-  )\
-)
+# $1: module name
+# $2: list of C++ runtime static libraries (if any)
+# $3: list of C++ runtime shared libraries (if any)
+# $4: list of C++ runtime ldlibs (if any)
+#
+module-add-c++-deps = \
+    $(if $(call strip,$2),$(call ndk_log,Add dependency '$(call strip,$2)' to module '$1'))\
+    $(eval __ndk_modules.$1.STATIC_LIBRARIES += $(2))\
+    $(if $(call strip,$3),$(call ndk_log,Add dependency '$(call strip,$3)' to module '$1'))\
+    $(eval __ndk_modules.$1.SHARED_LIBRARIES += $(3))\
+    $(if $(call strip,$4),$(call ndk_log,Add dependency '$(call strip,$4)' to module '$1'))\
+    $(eval __ndk_modules.$1.LDLIBS += $(4))
+
 
 # =============================================================================
 #
@@ -984,13 +1013,11 @@
 # 'tags' associated to it. A tag name must not contain space, and its
 # usage can vary.
 #
-# For example, the 'debug' tag is used to sources that must be built in debug
-# mode, the 'arm' tag is used for sources that must be built using the 32-bit
-# instruction set on ARM platforms. Historically .neon was used to enable Neon
-# for a given source file, but Neon was made the default in r21 and non-Neon
-# mode is no longer supported in r24 so these tags are accepted but have no
-# effect. A no_neon tag was supported as an implementation detail only; it could
-# not be used by Android.mk files, and is no longer present.
+# For example, the 'debug' tag is used to sources that must be built
+# in debug mode, the 'arm' tag is used for sources that must be built
+# using the 32-bit instruction set on ARM platforms, and 'neon' is used
+# for sources that must be built with ARM Advanced SIMD (a.k.a. NEON)
+# support.
 #
 # More tags might be introduced in the future.
 #
@@ -1168,16 +1195,13 @@
     APP_SHORT_COMMANDS \
     APP_STL \
     APP_STRIP_MODE \
-    APP_SUPPORT_FLEXIBLE_PAGE_SIZES \
     APP_THIN_ARCHIVE \
-    APP_WEAK_API_DEFS \
     APP_WRAP_SH \
 
 # NDK_ALL_ABIS is not configured yet.
 NDK_APP_VARS_OPTIONAL += \
     APP_WRAP_SH_armeabi-v7a \
     APP_WRAP_SH_arm64-v8a \
-    APP_WRAP_SH_riscv64 \
     APP_WRAP_SH_x86 \
     APP_WRAP_SH_x86_64 \
 
@@ -1206,7 +1230,7 @@
     $(subst ../,__/,\
       $(subst :,_,\
         $(eval __obj := $1)\
-        $(foreach __ext,.c .s .S .asm $(LOCAL_CPP_EXTENSION),\
+        $(foreach __ext,.c .s .S .asm $(LOCAL_CPP_EXTENSION) $(LOCAL_RS_EXTENSION),\
             $(eval __obj := $(__obj:%$(__ext)=%$(TARGET_OBJ_EXTENSION)))\
         )\
         $(__obj)\
@@ -1215,12 +1239,44 @@
 -test-get-object-name = \
   $(eval TARGET_OBJ_EXTENSION=.o)\
   $(eval LOCAL_CPP_EXTENSION ?= .cpp)\
+  $(eval LOCAL_RS_EXTENSION ?= .rs)\
   $(call test-expect,foo.o,$(call get-object-name,foo.c))\
   $(call test-expect,bar.o,$(call get-object-name,bar.s))\
   $(call test-expect,zoo.o,$(call get-object-name,zoo.S))\
   $(call test-expect,tot.o,$(call get-object-name,tot.cpp))\
+  $(call test-expect,RS.o,$(call get-object-name,RS.rs))\
   $(call test-expect,goo.o,$(call get-object-name,goo.asm))
 
+get-rs-scriptc-name = $(strip \
+    $(subst ../,__/,\
+      $(subst :,_,\
+        $(eval __obj := $1)\
+        $(foreach __ext,$(LOCAL_RS_EXTENSION),\
+            $(eval __obj := $(__obj:%$(__ext)=%.cpp))\
+        )\
+        $(dir $(__obj))ScriptC_$(notdir $(__obj))\
+    )))
+
+get-rs-bc-name = $(strip \
+    $(subst ../,__/,\
+      $(subst :,_,\
+        $(eval __obj := $1)\
+        $(foreach __ext,$(LOCAL_RS_EXTENSION),\
+            $(eval __obj := $(__obj:%$(__ext)=%.bc))\
+        )\
+        $(__obj)\
+    )))
+
+get-rs-so-name = $(strip \
+    $(subst ../,__/,\
+      $(subst :,_,\
+        $(eval __obj := $1)\
+        $(foreach __ext,$(LOCAL_RS_EXTENSION),\
+            $(eval __obj := $(__obj:%$(__ext)=%$(TARGET_SONAME_EXTENSION)))\
+        )\
+        $(notdir $(__obj))\
+    )))
+
 # -----------------------------------------------------------------------------
 # Macro    : hide
 # Returns  : nothing
@@ -1285,7 +1341,7 @@
 endif
 
 $$(call generate-file-dir,$$(_OBJ))
-$$(_OBJ): $$(_SRC) $$(LOCAL_MAKEFILE) $$(NDK_APP_APPLICATION_MK)
+$$(_OBJ): $$(_SRC) $$(LOCAL_MAKEFILE) $$(NDK_APP_APPLICATION_MK) $(LOCAL_RS_OBJECTS)
 	$$(call host-echo-build-step,$$(PRIVATE_ABI),$$(PRIVATE_TEXT)) "$$(PRIVATE_MODULE) <= $$(notdir $$(PRIVATE_SRC))"
 	$$(hide) $$(call host-rm,$$(call host-path,$$(PRIVATE_OBJ)))
 	$$(hide) $$(PRIVATE_CC) -MMD -MP -MF $$(PRIVATE_DEPS) $$(PRIVATE_CFLAGS) $$(call host-path,$$(PRIVATE_SRC)) -o $$(call host-path,$$(PRIVATE_OBJ))
@@ -1325,6 +1381,79 @@
 sub_commands_json += $$(_JSON_INTERMEDIATE)
 endef
 
+
+# For renderscript: slightly different from the above ev-build-file
+# _RS_SRC: RS source file
+# _CPP_SRC: ScriptC_RS.cpp source file
+# _BC_SRC: Bitcode source file
+# _BC_SO: Bitcode SO name, no path
+# _OBJ: destination file
+# _RS_CC: 'compiler' command for _RS_SRC
+# _RS_BCC: 'compiler' command for _BC_SRC
+# _CXX: 'compiler' command for _CPP_SRC
+# _RS_FLAGS: 'compiler' flags for _RS_SRC
+# _CPP_FLAGS: 'compiler' flags for _CPP_SRC
+# _LD_FLAGS: 'compiler' flags for linking
+# _TEXT: Display text (e.g. "Compile RS")
+# _OUT: output dir
+# _COMPAT: 'true' if bcc_compat is required
+#
+define ev-build-rs-file
+$$(_OBJ): PRIVATE_ABI       := $$(TARGET_ARCH_ABI)
+$$(_OBJ): PRIVATE_RS_SRC    := $$(_RS_SRC)
+$$(_OBJ): PRIVATE_CPP_SRC   := $$(_CPP_SRC)
+$$(_OBJ): PRIVATE_BC_SRC    := $$(_BC_SRC)
+$$(_OBJ): PRIVATE_OBJ       := $$(_OBJ)
+$$(_OBJ): PRIVATE_BC_OBJ    := $$(_BC_SRC)$(TARGET_OBJ_EXTENSION)
+$$(_OBJ): PRIVATE_BC_SO     := $$(_BC_SO)
+$$(_OBJ): PRIVATE_DEPS      := $$(call host-path,$$(_OBJ).d)
+$$(_OBJ): PRIVATE_MODULE    := $$(LOCAL_MODULE)
+$$(_OBJ): PRIVATE_TEXT      := $$(_TEXT)
+$$(_OBJ): PRIVATE_RS_CC     := $$(_RS_CC)
+$$(_OBJ): PRIVATE_RS_BCC    := $$(_RS_BCC)
+$$(_OBJ): PRIVATE_CXX       := $$(_CXX)
+$$(_OBJ): PRIVATE_RS_FLAGS  := $$(_RS_FLAGS)
+$$(_OBJ): PRIVATE_CPPFLAGS  := $$(_CPP_FLAGS)
+$$(_OBJ): PRIVATE_LD        := $$(TARGET_LD)
+$$(_OBJ): PRIVATE_LDFLAGS   := $$(_LD_FLAGS)
+$$(_OBJ): PRIVATE_OUT       := $$(TARGET_OUT)
+$$(_OBJ): PRIVATE_RS_TRIPLE := $$(RS_TRIPLE)
+$$(_OBJ): PRIVATE_COMPAT    := $$(_COMPAT)
+$$(_OBJ): PRIVATE_LIB_PATH  := $$(RENDERSCRIPT_TOOLCHAIN_PREBUILT_ROOT)/platform/$(TARGET_ARCH)
+$$(_OBJ): PRIVATE_SYSROOT_LINK_ARG := $$(SYSROOT_LINK_ARG)
+
+ifeq ($$(LOCAL_SHORT_COMMANDS),true)
+_OPTIONS_LISTFILE := $$(_OBJ).cflags
+$$(_OBJ): $$(call generate-list-file,$$(_CPP_FLAGS),$$(_OPTIONS_LISTFILE))
+$$(_OBJ): PRIVATE_CPPFLAGS := @$$(call host-path,$$(_OPTIONS_LISTFILE))
+$$(_OBJ): $$(_OPTIONS_LISTFILE)
+endif
+
+# llvm-rc-cc.exe has problem accepting input *.rs with path. To workaround:
+# cd ($dir $(_SRC)) ; llvm-rs-cc $(notdir $(_SRC)) -o ...full-path...
+#
+ifeq ($$(_COMPAT),true)
+	# In COMPAT mode, use LD instead of CXX to bypass the gradle check for their book-keeping of native libs.
+	# And this is what we do with SDK.
+	# TODO: We could use CXX after gradle can correctly handle librs.*.so.
+$$(_OBJ): $$(_RS_SRC) $$(LOCAL_MAKEFILE) $$(NDK_APP_APPLICATION_MK)
+	$$(call host-echo-build-step,$$(PRIVATE_ABI),$$(PRIVATE_TEXT)) "$$(PRIVATE_MODULE) <= $$(notdir $$(PRIVATE_RS_SRC))"
+	$$(hide) \
+	cd $$(call host-path,$$(dir $$(PRIVATE_RS_SRC))) && $$(PRIVATE_RS_CC) -o $$(call host-path,$$(abspath $$(dir $$(PRIVATE_OBJ))))/ -d $$(abspath $$(call host-path,$$(dir $$(PRIVATE_OBJ)))) -MD -reflect-c++ -target-api $(strip $(subst android-,,$(APP_PLATFORM))) $$(PRIVATE_RS_FLAGS) $$(notdir $$(PRIVATE_RS_SRC))
+	$$(hide) \
+	$$(PRIVATE_RS_BCC) -O3 -o $$(call host-path,$$(PRIVATE_BC_OBJ)) -fPIC -shared -rt-path $$(PRIVATE_LIB_PATH)/librsrt.bc -mtriple $$(PRIVATE_RS_TRIPLE) $$(call host-path,$$(PRIVATE_BC_SRC))
+	$$(PRIVATE_LD) -shared -Bsymbolic -z noexecstack -z relro -z now -nostdlib $$(call host-path,$$(PRIVATE_BC_OBJ)) $$(PRIVATE_LIB_PATH)/libcompiler_rt.a -o $$(call host-path,$$(PRIVATE_OUT)/librs.$$(PRIVATE_BC_SO)) $$(PRIVATE_SYSROOT_LINK_ARG) -L $$(PRIVATE_LIB_PATH) -lRSSupport -lm -lc
+	$$(PRIVATE_CXX) -MMD -MP -MF $$(PRIVATE_DEPS) $$(PRIVATE_CPPFLAGS) $$(call host-path,$$(PRIVATE_CPP_SRC)) -o $$(call host-path,$$(PRIVATE_OBJ))
+else
+$$(_OBJ): $$(_RS_SRC) $$(LOCAL_MAKEFILE) $$(NDK_APP_APPLICATION_MK)
+	$$(call host-echo-build-step,$$(PRIVATE_ABI),$$(PRIVATE_TEXT)) "$$(PRIVATE_MODULE) <= $$(notdir $$(PRIVATE_RS_SRC))"
+	$$(hide) \
+	cd $$(call host-path,$$(dir $$(PRIVATE_RS_SRC))) && $$(PRIVATE_RS_CC) -o $$(call host-path,$$(abspath $$(dir $$(PRIVATE_OBJ))))/ -d $$(abspath $$(call host-path,$$(dir $$(PRIVATE_OBJ)))) -MD -reflect-c++ -target-api $(strip $(subst android-,,$(APP_PLATFORM))) $$(PRIVATE_RS_FLAGS) $$(notdir $$(PRIVATE_RS_SRC))
+	$$(hide) \
+	$$(PRIVATE_CXX) -MMD -MP -MF $$(PRIVATE_DEPS) $$(PRIVATE_CPPFLAGS) $$(call host-path,$$(PRIVATE_CPP_SRC)) -o $$(call host-path,$$(PRIVATE_OBJ))
+endif
+endef
+
 # This assumes the same things than ev-build-file, but will handle
 # the definition of LOCAL_FILTER_ASM as well.
 define ev-build-source-file
@@ -1495,7 +1624,7 @@
 endif
 
 $$(call generate-file-dir,$$(_OBJ))
-$$(_OBJ): $$(_SRC) $$(LOCAL_MAKEFILE) $$(NDK_APP_APPLICATION_MK)
+$$(_OBJ): $$(_SRC) $$(LOCAL_MAKEFILE) $$(NDK_APP_APPLICATION_MK) $(LOCAL_RS_OBJECTS)
 	$$(call host-echo-build-step,$$(PRIVATE_ABI),$$(PRIVATE_TEXT)) "$$(PRIVATE_MODULE) <= $$(notdir $$(PRIVATE_SRC))"
 	$$(hide) $$(PRIVATE_CC) $$(PRIVATE_CFLAGS) $$(call host-path,$$(PRIVATE_SRC)) -o $$(call host-path,$$(PRIVATE_OBJ))
 endef
@@ -1702,6 +1831,72 @@
 # -----------------------------------------------------------------------------
 clang-tidy-cpp = $(eval $(call ev-clang-tidy-cpp,$1,$2))
 
+# -----------------------------------------------------------------------------
+# Template  : ev-compile-rs-source
+# Arguments : 1: single RS source file name (relative to LOCAL_PATH)
+#             2: intermediate cpp file (without path)
+#             3: intermediate bc file (without path)
+#             4: so file from bc (without path)
+#             5: target object file (without path)
+#             6: 'true' if bcc_compat is required
+# Returns   : None
+# Usage     : $(eval $(call ev-compile-rs-source,<srcfile>,<cppfile>,<objfile>)
+# Rationale : Internal template evaluated by compile-rs-source
+# -----------------------------------------------------------------------------
+
+define  ev-compile-rs-source
+_RS_SRC:=$$(call local-source-file-path,$(1))
+_CPP_SRC:=$$(LOCAL_OBJS_DIR:%/=%)/$(2)
+_BC_SRC:=$$(LOCAL_OBJS_DIR:%/=%)/$(3)
+_BC_SO:=$(4)
+_OBJ:=$$(LOCAL_OBJS_DIR:%/=%)/$(5)
+_COMPAT := $(6)
+_CPP_FLAGS := \
+    $$(GLOBAL_CXXFLAGS) \
+    $$(TARGET_CXXFLAGS) \
+    $$(call get-src-file-target-cflags,$(1)) \
+    $$(call host-c-includes, $$(LOCAL_C_INCLUDES) $$(LOCAL_PATH)) \
+    $$(NDK_APP_CFLAGS) \
+    $$(NDK_APP_CPPFLAGS) \
+    $$(NDK_APP_CXXFLAGS) \
+    $$(LOCAL_CFLAGS) \
+    $$(LOCAL_CPPFLAGS) \
+    $$(LOCAL_CXXFLAGS) \
+    -fno-rtti \
+    -c \
+
+_LD_FLAGS := \
+    $$(GLOBAL_LDFLAGS) \
+    $$(TARGET_LDFLAGS) \
+
+_RS_FLAGS := $$(call host-c-includes, $$(LOCAL_RENDERSCRIPT_INCLUDES) $$(LOCAL_PATH)) \
+          $$(TARGET_RS_FLAGS) \
+          $$(LOCAL_RENDERSCRIPT_FLAGS) \
+          $$(call host-c-includes,$$(TARGET_RENDERSCRIPT_INCLUDES)) \
+
+_RS_CC  := $$(NDK_CCACHE) $$(TARGET_RS_CC)
+_RS_BCC := $$(NDK_CCACHE) $$(TARGET_RS_BCC)
+_CXX    := $$(NDK_CCACHE) $$(TARGET_CXX)
+_TEXT   := Compile RS
+_OUT    := $$(TARGET_OUT)
+
+$$(eval $$(call ev-build-rs-file))
+endef
+
+# -----------------------------------------------------------------------------
+# Function  : compile-rs-source
+# Arguments : 1: single RS source file name (relative to LOCAL_PATH)
+#             2: intermediate cpp file name
+#             3: intermediate bc file
+#             4: so file from bc (without path)
+#             5: object file name
+#             6: 'true' if bcc_compat is required
+# Returns   : None
+# Usage     : $(call compile-rs-source,<srcfile>)
+# Rationale : Setup everything required to build a single RS source file
+# -----------------------------------------------------------------------------
+compile-rs-source = $(eval $(call ev-compile-rs-source,$1,$2,$3,$4,$5,$6))
+
 #
 #  Module imports
 #
@@ -1866,8 +2061,21 @@
 #
 
 # The list of registered STL implementations we support
-NDK_STL_LIST := c++_shared c++_static system none
+NDK_STL_LIST :=
 
+# Used internally to register a given STL implementation, see below.
+#
+# $1: STL name as it appears in APP_STL (e.g. system)
+# $2: STL module path (e.g. cxx-stl/system)
+# $3: list of static libraries all modules will depend on
+# $4: list of shared libraries all modules will depend on
+#
+ndk-stl-register = \
+    $(eval __ndk_stl := $(strip $1)) \
+    $(eval NDK_STL_LIST += $(__ndk_stl)) \
+    $(eval NDK_STL.$(__ndk_stl).IMPORT_MODULE := $(strip $2)) \
+    $(eval NDK_STL.$(__ndk_stl).STATIC_LIBS := $(strip $(call strip-lib-prefix,$3))) \
+    $(eval NDK_STL.$(__ndk_stl).SHARED_LIBS := $(strip $(call strip-lib-prefix,$4))) \
 
 # Called to check that the value of APP_STL is a valid one.
 # $1: STL name as it apperas in APP_STL (e.g. 'system')
@@ -1878,6 +2086,42 @@
         $(call __ndk_info,Please use one of the following instead: $(NDK_STL_LIST))\
         $(call __ndk_error,Aborting))
 
+# Called before the top-level Android.mk is parsed to
+# select the STL implementation.
+# $1: STL name as it appears in APP_STL (e.g. system)
+#
+ndk-stl-select = \
+    $(if $(filter none,$1),,\
+        $(if $(NDK_STL.$1.IMPORT_MODULE),\
+            $(call import-module,$(NDK_STL.$1.IMPORT_MODULE)) \
+        )\
+    )
+
+# Called after all Android.mk files are parsed to add
+# proper STL dependencies to every C++ module.
+# $1: STL name as it appears in APP_STL (e.g. system)
+#
+ndk-stl-add-dependencies = \
+    $(call modules-add-c++-dependencies,\
+        $(NDK_STL.$1.STATIC_LIBS),\
+        $(NDK_STL.$1.SHARED_LIBS),\
+        $(NDK_STL.$1.LDLIBS))
+
+$(call ndk-stl-register,none)
+$(call ndk-stl-register,system)
+
+$(call ndk-stl-register,\
+    c++_static,\
+    cxx-stl/llvm-libc++,\
+    c++_static\
+    )
+
+$(call ndk-stl-register,\
+    c++_shared,\
+    cxx-stl/llvm-libc++,\
+    ,\
+    c++_shared\
+    )
 
 ifneq (,$(NDK_UNIT_TESTS))
 $(call ndk-run-all-tests)
diff --git a/build/core/gdb.mk b/build/core/gdb.mk
new file mode 100644
index 0000000..bd154fc
--- /dev/null
+++ b/build/core/gdb.mk
@@ -0,0 +1,59 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Ensure that for debuggable applications, gdbserver will be copied to
+# the proper location
+
+NDK_APP_GDBSERVER := $(NDK_APP_DST_DIR)/gdbserver
+NDK_APP_GDBSETUP := $(NDK_APP_DST_DIR)/gdb.setup
+
+ifeq ($(NDK_APP_DEBUGGABLE),true)
+ifeq ($(TARGET_SONAME_EXTENSION),.so)
+
+installed_modules: $(NDK_APP_GDBSERVER)
+
+$(NDK_APP_GDBSERVER): PRIVATE_ABI     := $(TARGET_ARCH_ABI)
+$(NDK_APP_GDBSERVER): PRIVATE_NAME    := $(TOOLCHAIN_NAME)
+$(NDK_APP_GDBSERVER): PRIVATE_SRC     := $(TARGET_GDBSERVER)
+$(NDK_APP_GDBSERVER): PRIVATE_DST     := $(NDK_APP_GDBSERVER)
+
+$(call generate-file-dir,$(NDK_APP_GDBSERVER))
+
+$(NDK_APP_GDBSERVER): clean-installed-binaries
+	$(call host-echo-build-step,$(PRIVATE_ABI),Gdbserver) "[$(PRIVATE_NAME)] $(call pretty-dir,$(PRIVATE_DST))"
+	$(hide) $(call host-install,$(PRIVATE_SRC),$(PRIVATE_DST))
+endif
+
+# Install gdb.setup for both .so and .bc projects
+ifneq (,$(filter $(TARGET_SONAME_EXTENSION),.so .bc))
+installed_modules: $(NDK_APP_GDBSETUP)
+
+$(NDK_APP_GDBSETUP): PRIVATE_ABI := $(TARGET_ARCH_ABI)
+$(NDK_APP_GDBSETUP): PRIVATE_DST := $(NDK_APP_GDBSETUP)
+$(NDK_APP_GDBSETUP): PRIVATE_SOLIB_PATH := $(TARGET_OUT)
+$(NDK_APP_GDBSETUP): PRIVATE_SRC_DIRS := $(SYSROOT_INC)
+
+$(NDK_APP_GDBSETUP):
+	$(call host-echo-build-step,$(PRIVATE_ABI),Gdbsetup) "$(call pretty-dir,$(PRIVATE_DST))"
+	$(hide) $(HOST_ECHO) "set solib-search-path $(call host-path,$(PRIVATE_SOLIB_PATH))" > $(PRIVATE_DST)
+	$(hide) $(HOST_ECHO) "directory $(call host-path,$(call remove-duplicates,$(PRIVATE_SRC_DIRS)))" >> $(PRIVATE_DST)
+
+$(call generate-file-dir,$(NDK_APP_GDBSETUP))
+
+# This prevents parallel execution to clear gdb.setup after it has been written to
+$(NDK_APP_GDBSETUP): clean-installed-binaries
+endif
+endif
diff --git a/build/core/import-locals.mk b/build/core/import-locals.mk
index 387c5dd..3a97dd5 100644
--- a/build/core/import-locals.mk
+++ b/build/core/import-locals.mk
@@ -32,6 +32,7 @@
 imported_CFLAGS     := $(call module-get-listed-export,$(all_depends),CFLAGS)
 imported_CONLYFLAGS := $(call module-get-listed-export,$(all_depends),CONLYFLAGS)
 imported_CPPFLAGS   := $(call module-get-listed-export,$(all_depends),CPPFLAGS)
+imported_RENDERSCRIPT_FLAGS := $(call module-get-listed-export,$(all_depends),RENDERSCRIPT_FLAGS)
 imported_ASMFLAGS   := $(call module-get-listed-export,$(all_depends),ASMFLAGS)
 imported_C_INCLUDES := $(call module-get-listed-export,$(all_depends),C_INCLUDES)
 imported_LDFLAGS    := $(call module-get-listed-export,$(all_depends),LDFLAGS)
@@ -43,6 +44,7 @@
     $(info   CFLAGS='$(imported_CFLAGS)')
     $(info   CONLYFLAGS='$(imported_CONLYFLAGS)')
     $(info   CPPFLAGS='$(imported_CPPFLAGS)')
+    $(info   RENDERSCRIPT_FLAGS='$(imported_RENDERSCRIPT_FLAGS)')
     $(info   ASMFLAGS='$(imported_ASMFLAGS)')
     $(info   C_INCLUDES='$(imported_C_INCLUDES)')
     $(info   LDFLAGS='$(imported_LDFLAGS)')
@@ -58,6 +60,7 @@
 LOCAL_CFLAGS     := $(strip $(imported_CFLAGS) $(LOCAL_CFLAGS))
 LOCAL_CONLYFLAGS := $(strip $(imported_CONLYFLAGS) $(LOCAL_CONLYFLAGS))
 LOCAL_CPPFLAGS   := $(strip $(imported_CPPFLAGS) $(LOCAL_CPPFLAGS))
+LOCAL_RENDERSCRIPT_FLAGS := $(strip $(imported_RENDERSCRIPT_FLAGS) $(LOCAL_RENDERSCRIPT_FLAGS))
 LOCAL_ASMFLAGS := $(strip $(imported_ASMFLAGS) $(LOCAL_ASMFLAGS))
 LOCAL_LDFLAGS    := $(strip $(imported_LDFLAGS) $(LOCAL_LDFLAGS))
 
diff --git a/build/core/init.mk b/build/core/init.mk
index a897f77..69e5beb 100644
--- a/build/core/init.mk
+++ b/build/core/init.mk
@@ -150,11 +150,6 @@
         endif
         # We should not be there, but just in case !
         ifneq (,$(findstring CYGWIN,$(UNAME)))
-            $(call __ndk_warning,Unsupported build environment CYGWIN.)
-            HOST_OS := windows
-        endif
-        ifneq (,$(findstring MINGW64,$(UNAME)))
-            $(call __ndk_warning,Unsupported build environment MSYS.)
             HOST_OS := windows
         endif
         ifeq ($(HOST_OS),)
@@ -196,7 +191,7 @@
         ifneq (,$(filter CYGWIN%,$(UNAME)))
             $(call ndk_log,Cygwin detected: $(shell uname -a))
             HOST_OS := cygwin
-            _ := $(shell rm -f NUL) # Cleaning up
+            DUMMY := $(shell rm -f NUL) # Cleaning up
         else
             ifneq (,$(filter MINGW32%,$(UNAME)))
                 $(call ndk_log,MSys detected: $(shell uname -a))
@@ -278,7 +273,6 @@
 HOST_PREBUILT := $(strip $(wildcard $(HOST_TOOLS_ROOT)/bin))
 HOST_MAKE := $(strip $(NDK_HOST_MAKE))
 HOST_PYTHON := $(strip $(NDK_HOST_PYTHON))
-TOOLCHAIN_ROOT := $(NDK_ROOT)/toolchains/llvm/prebuilt/$(HOST_TAG64)
 ifdef HOST_PREBUILT
     $(call ndk_log,Host tools prebuilt directory: $(HOST_PREBUILT))
     # The windows prebuilt binaries are for ndk-build.cmd
@@ -288,11 +282,7 @@
             HOST_MAKE := $(wildcard $(HOST_PREBUILT)/make$(HOST_EXEEXT))
         endif
        ifndef HOST_PYTHON
-            ifeq ($(HOST_OS),windows)
-                HOST_PYTHON := $(wildcard $(TOOLCHAIN_ROOT)/python3/python$(HOST_EXEEXT))
-            else
-                HOST_PYTHON := $(wildcard $(TOOLCHAIN_ROOT)/python3/bin/python3$(HOST_EXEEXT))
-            endif
+            HOST_PYTHON := $(wildcard $(HOST_PREBUILT)/python$(HOST_EXEEXT))
         endif
     endif
 else
@@ -410,6 +400,41 @@
 #
 # ====================================================================
 
+# The platform files were moved in the Android source tree from
+# $TOP/ndk/build/platforms to $TOP/development/ndk/platforms. However,
+# the official NDK release packages still place them under the old
+# location for now, so deal with this here
+#
+NDK_PLATFORMS_ROOT := $(strip $(NDK_PLATFORMS_ROOT))
+ifndef NDK_PLATFORMS_ROOT
+    NDK_PLATFORMS_ROOT := $(strip $(wildcard $(NDK_ROOT)/platforms))
+    ifndef NDK_PLATFORMS_ROOT
+        NDK_PLATFORMS_ROOT := $(strip $(wildcard $(NDK_ROOT)/build/platforms))
+    endif
+
+    ifndef NDK_PLATFORMS_ROOT
+        $(call __ndk_info,Could not find platform files (headers and libraries))
+        $(if $(strip $(wildcard $(NDK_ROOT)/RELEASE.TXT)),\
+            $(call __ndk_info,Please define NDK_PLATFORMS_ROOT to point to a valid directory.)\
+        )
+        $(call __ndk_error,Aborting)
+    endif
+
+    $(call ndk_log,Found platform root directory: $(NDK_PLATFORMS_ROOT))
+endif
+ifeq ($(strip $(wildcard $(NDK_PLATFORMS_ROOT)/android-*)),)
+    $(call __ndk_info,Your NDK_PLATFORMS_ROOT points to an invalid directory)
+    $(call __ndk_info,Current value: $(NDK_PLATFORMS_ROOT))
+    $(call __ndk_error,Aborting)
+endif
+
+NDK_ALL_PLATFORMS := $(strip $(notdir $(wildcard $(NDK_PLATFORMS_ROOT)/android-*)))
+$(call ndk_log,Found supported platforms: $(NDK_ALL_PLATFORMS))
+
+$(foreach _platform,$(NDK_ALL_PLATFORMS),\
+  $(eval include $(BUILD_SYSTEM)/add-platform.mk)\
+)
+
 # ====================================================================
 #
 # Read all toolchain-specific configuration files.
diff --git a/build/core/install_stl.mk b/build/core/install_stl.mk
deleted file mode 100644
index dcea75d..0000000
--- a/build/core/install_stl.mk
+++ /dev/null
@@ -1,22 +0,0 @@
-# Not bothering to check if there's actually any C++ code in the app. c++_shared
-# is not the default, so if someone has set it explicitly we might as well do
-# what they say.
-ifeq ($(APP_STL),c++_shared)
-
-NDK_LIBCXX_TARGET := $(NDK_APP_DST_DIR)/libc++_shared.so
-NDK_LIBCXX_LIB_PATH := $(SYSROOT_LIB_DIR)/libc++_shared.so
-
-installed_modules: $(NDK_LIBCXX_TARGET)
-
-$(NDK_LIBCXX_TARGET): PRIVATE_ABI := $(TARGET_ARCH_ABI)
-$(NDK_LIBCXX_TARGET): PRIVATE_NAME := Install
-$(NDK_LIBCXX_TARGET): PRIVATE_SRC := $(NDK_LIBCXX_LIB_PATH)
-$(NDK_LIBCXX_TARGET): PRIVATE_DST := $(NDK_LIBCXX_TARGET)
-
-$(call generate-file-dir,$(NDK_LIBCXX_TARGET))
-
-$(NDK_LIBCXX_TARGET): clean-installed-binaries
-	$(call host-echo-build-step,$(PRIVATE_ABI),$(PRIVATE_NAME) "$(call pretty-dir,$(PRIVATE_DST))")
-	$(hide) $(call host-install,$(PRIVATE_SRC),$(PRIVATE_DST))
-
-endif
\ No newline at end of file
diff --git a/build/core/openmp.mk b/build/core/openmp.mk
index 1999fd8..5debf70 100644
--- a/build/core/openmp.mk
+++ b/build/core/openmp.mk
@@ -39,7 +39,7 @@
 $(NDK_APP_OMP): PRIVATE_SRC := $(NDK_OMP_LIB_PATH)
 $(NDK_APP_OMP): PRIVATE_DST := $(NDK_APP_OMP)
 
-$(call generate-file-dir,$(NDK_APP_OMP))
+$(call generate-file-dir,$(NDK_APP_$(NDK_SANITIZER_NAME)))
 
 $(NDK_APP_OMP): clean-installed-binaries
 	$(call host-echo-build-step,$(PRIVATE_ABI),OpenMP "$(call pretty-dir,$(PRIVATE_DST))")
diff --git a/build/core/prebuilt-library.mk b/build/core/prebuilt-library.mk
index cb9c336..4c7d93d 100644
--- a/build/core/prebuilt-library.mk
+++ b/build/core/prebuilt-library.mk
@@ -40,16 +40,9 @@
 prebuilt := $(strip $(wildcard $(prebuilt_path)))
 
 ifndef prebuilt
-    ifeq ($(LOCAL_ALLOW_MISSING_PREBUILT),true)
-        prebuilt := $(prebuilt_path)
-        include $(BUILD_SYSTEM)/define-missing-prebuilt.mk
-    else
-        $(call __ndk_info,ERROR:$(LOCAL_MAKEFILE):$(LOCAL_MODULE): \
-            LOCAL_SRC_FILES points to a missing file)
-        $(call __ndk_info,Check that $(prebuilt_path) exists, or that its path \
-            is correct)
-        $(call __ndk_error,Aborting)
-    endif
+$(call __ndk_info,ERROR:$(LOCAL_MAKEFILE):$(LOCAL_MODULE): LOCAL_SRC_FILES points to a missing file)
+$(call __ndk_info,Check that $(prebuilt_path) exists, or that its path is correct)
+$(call __ndk_error,Aborting)
 endif
 
 # If LOCAL_MODULE_FILENAME is defined, it will be used to name the file
diff --git a/build/core/sanitizers.mk b/build/core/sanitizers.mk
index 0967ee6..3401ecb 100644
--- a/build/core/sanitizers.mk
+++ b/build/core/sanitizers.mk
@@ -15,7 +15,6 @@
 #
 
 NDK_APP_ASAN := $(NDK_APP_DST_DIR)/$(TARGET_ASAN_BASENAME)
-NDK_APP_TSAN := $(NDK_APP_DST_DIR)/$(TARGET_TSAN_BASENAME)
 NDK_APP_UBSAN := $(NDK_APP_DST_DIR)/$(TARGET_UBSAN_BASENAME)
 
 NDK_MODULES_LDFLAGS :=
@@ -35,11 +34,6 @@
 NDK_SANITIZER_EXCLUDE_FSANITIZE_ARGS :=
 include $(BUILD_SYSTEM)/install_sanitizer.mk
 
-NDK_SANITIZER_NAME := TSAN
-NDK_SANITIZER_FSANITIZE_ARGS := thread
-NDK_SANITIZER_EXCLUDE_FSANITIZE_ARGS :=
-include $(BUILD_SYSTEM)/install_sanitizer.mk
-
 # If the user has not specified their own wrap.sh and is using ASAN, install a
 # default ASAN wrap.sh for them.
 ifneq (,$(filter address,$(NDK_SANITIZERS)))
@@ -47,11 +41,3 @@
         NDK_APP_WRAP_SH_$(TARGET_ARCH_ABI) := $(NDK_ROOT)/wrap.sh/asan.sh
     endif
 endif
-
-# If the user has not specified their own wrap.sh and is using HWASAN, install a
-# default HWASAN wrap.sh for them.
-ifneq (,$(filter hwaddress,$(NDK_SANITIZERS)))
-    ifeq ($(NDK_NO_USER_WRAP_SH),true)
-        NDK_APP_WRAP_SH_$(TARGET_ARCH_ABI) := $(NDK_ROOT)/wrap.sh/hwasan.sh
-    endif
-endif
diff --git a/build/core/setup-abi.mk b/build/core/setup-abi.mk
index b6325ee..0e278c5 100644
--- a/build/core/setup-abi.mk
+++ b/build/core/setup-abi.mk
@@ -26,19 +26,29 @@
 
 TARGET_OUT := $(NDK_APP_OUT)/$(_app)/$(TARGET_ARCH_ABI)
 
-TARGET_PLATFORM_LEVEL := $(APP_PLATFORM_LEVEL)
+# For x86 and mips: the minimal platform level is android-9
+TARGET_PLATFORM_SAVED := $(TARGET_PLATFORM)
 
-# Pull up the minSdkVersion for this ABI if it is higher than the user's
-# APP_PLATFORM. A warning will be separately emitted in setup-app-platform.mk if
-# the user's APP_PLATFORM is too low for the NDK overall.
-MIN_OS_FOR_TARGET := $(NDK_ABI_${TARGET_ARCH_ABI}_MIN_OS_VERSION)
-ifneq ($(call lt,$(TARGET_PLATFORM_LEVEL),$(MIN_OS_FOR_TARGET)),)
-    TARGET_PLATFORM_LEVEL := $(MIN_OS_FOR_TARGET)
+# For 64-bit ABIs: the minimal platform level is android-21
+ifneq ($(filter $(NDK_KNOWN_DEVICE_ABI64S),$(TARGET_ARCH_ABI)),)
+$(foreach _plat,3 4 5 8 9 10 11 12 13 14 15 16 17 18 19 20,\
+    $(eval TARGET_PLATFORM := $$(subst android-$(_plat),android-21,$$(TARGET_PLATFORM)))\
+)
 endif
 
-# Not used by ndk-build, but are documented for use by Android.mk files.
-TARGET_PLATFORM := android-$(TARGET_PLATFORM_LEVEL)
-TARGET_ABI := $(TARGET_PLATFORM)-$(TARGET_ARCH_ABI)
+TARGET_PLATFORM_LEVEL := $(strip $(subst android-,,$(TARGET_PLATFORM)))
+
+# If we're targeting a new enough platform version, we don't actually need to
+# cover any gaps in libc for libc++ support. In those cases, save size in the
+# APK by avoiding libandroid_support.
+#
+# This is also a requirement for static executables, since using
+# libandroid_support with a modern libc.a will result in multiple symbol
+# definition errors.
+NDK_PLATFORM_NEEDS_ANDROID_SUPPORT := true
+ifeq ($(call gte,$(TARGET_PLATFORM_LEVEL),21),$(true))
+    NDK_PLATFORM_NEEDS_ANDROID_SUPPORT := false
+endif
 
 # Separate the debug and release objects. This prevents rebuilding
 # everything when you switch between these two modes. For projects
@@ -49,4 +59,29 @@
 TARGET_OBJS := $(TARGET_OUT)/objs
 endif
 
+TARGET_GDB_SETUP := $(TARGET_OUT)/setup.gdb
+
+# RS triple
+ifeq ($(TARGET_ARCH_ABI),armeabi-v7a)
+  RS_TRIPLE := armv7-none-linux-gnueabi
+endif
+ifeq ($(TARGET_ARCH_ABI),armeabi)
+  RS_TRIPLE := arm-none-linux-gnueabi
+endif
+ifeq ($(TARGET_ARCH_ABI),arm64-v8a)
+  RS_TRIPLE := aarch64-linux-android
+endif
+ifeq ($(TARGET_ARCH_ABI),mips)
+  RS_TRIPLE := mipsel-unknown-linux
+endif
+ifeq ($(TARGET_ARCH_ABI),x86)
+  RS_TRIPLE := i686-unknown-linux
+endif
+ifeq ($(TARGET_ARCH_ABI),x86_64)
+  RS_TRIPLE := x86_64-unknown-linux
+endif
+
 include $(BUILD_SYSTEM)/setup-toolchain.mk
+
+# Restore TARGET_PLATFORM, see above.
+TARGET_PLATFORM := $(TARGET_PLATFORM_SAVED)
diff --git a/build/core/setup-app-platform.mk b/build/core/setup-app-platform.mk
index c5ed8ae..9326f97 100644
--- a/build/core/setup-app-platform.mk
+++ b/build/core/setup-app-platform.mk
@@ -101,6 +101,14 @@
     $(call __ndk_error,Aborting.)
 endif
 
+# We pull low values up, fill in gaps, replace platform code names, replace
+# "latest", and error out on high values. Anything left is either a gap or
+# codename we missed, or user error.
+ifneq (,$(strip $(filter-out $(NDK_ALL_PLATFORMS),$(APP_PLATFORM))))
+    $(call __ndk_info,APP_PLATFORM set to unknown platform: $(APP_PLATFORM).)
+    $(call __ndk_error,Aborting)
+endif
+
 ifneq (null,$(APP_PROJECT_PATH))
 
 # Check platform level (after adjustment) against android:minSdkVersion in AndroidManifest.xml
diff --git a/build/core/setup-app.mk b/build/core/setup-app.mk
index 70724dd..dbd25e1 100644
--- a/build/core/setup-app.mk
+++ b/build/core/setup-app.mk
@@ -39,6 +39,9 @@
 ndk-app-$(_app): $(NDK_APP_MODULES)
 all: ndk-app-$(_app)
 
+# which platform/abi/toolchain are we going to use?
+TARGET_PLATFORM := $(call get,$(_map),APP_PLATFORM)
+
 # The ABI(s) to use
 NDK_APP_ABI := $(subst $(comma),$(space),$(strip $(NDK_APP_ABI)))
 ifndef NDK_APP_ABI
@@ -103,8 +106,18 @@
 
 clean-installed-binaries::
 	$(hide) $(call host-rm,$(NDK_APP_ABI:%=$(NDK_APP_LIBS_OUT)/%/*))
+	$(hide) $(call host-rm,$(NDK_APP_ABI:%=$(NDK_APP_LIBS_OUT)/%/gdbserver))
+	$(hide) $(call host-rm,$(NDK_APP_ABI:%=$(NDK_APP_LIBS_OUT)/%/gdb.setup))
 endif
 
+# Renderscript
+
+RENDERSCRIPT_TOOLCHAIN_PREBUILT_ROOT := \
+    $(NDK_ROOT)/toolchains/renderscript/prebuilt/$(HOST_TAG64)
+RENDERSCRIPT_TOOLCHAIN_PREFIX := $(RENDERSCRIPT_TOOLCHAIN_PREBUILT_ROOT)/bin/
+RENDERSCRIPT_TOOLCHAIN_HEADER := $(RENDERSCRIPT_TOOLCHAIN_PREBUILT_ROOT)/clang-include
+RENDERSCRIPT_PLATFORM_HEADER := $(RENDERSCRIPT_TOOLCHAIN_PREBUILT_ROOT)/platform/rs
+
 COMPILE_COMMANDS_JSON := $(call host-path,compile_commands.json)
 sub_commands_json :=
 
diff --git a/build/core/setup-toolchain.mk b/build/core/setup-toolchain.mk
index d1d36fb..7c058ba 100644
--- a/build/core/setup-toolchain.mk
+++ b/build/core/setup-toolchain.mk
@@ -17,7 +17,7 @@
 # to setup the target toolchain for a given platform/abi combination.
 #
 
-$(call assert-defined,TARGET_PLATFORM_LEVEL TARGET_ARCH TARGET_ARCH_ABI)
+$(call assert-defined,TARGET_PLATFORM TARGET_ARCH TARGET_ARCH_ABI)
 $(call assert-defined,NDK_APPS NDK_APP_STL)
 
 # Check that we have a toolchain that supports the current ABI.
@@ -61,11 +61,18 @@
     TARGET_TOOLCHAIN := $(NDK_TOOLCHAIN)
 endif # NDK_TOOLCHAIN is not empty
 
+TARGET_ABI := $(TARGET_PLATFORM)-$(TARGET_ARCH_ABI)
+
+TARGET_PREBUILT_SHARED_LIBRARIES :=
+
 # Define default values for TOOLCHAIN_NAME, this can be overriden in
 # the setup file.
 TOOLCHAIN_NAME   := $(TARGET_TOOLCHAIN)
 TOOLCHAIN_VERSION := $(call last,$(subst -,$(space),$(TARGET_TOOLCHAIN)))
 
+# We expect the gdbserver binary for this toolchain to be located at its root.
+TARGET_GDBSERVER := $(NDK_ROOT)/prebuilt/android-$(TARGET_ARCH)/gdbserver/gdbserver
+
 # compute NDK_APP_DST_DIR as the destination directory for the generated files
 NDK_APP_DST_DIR := $(NDK_APP_LIBS_OUT)/$(TARGET_ARCH_ABI)
 
@@ -80,7 +87,7 @@
 # Note that these are not needed for the typical case of invoking Clang, as
 # Clang already knows where the sysroot is relative to itself. We still need to
 # manually refer to these in some places because other tools such as yasm and
-# don't have this knowledge.
+# the renderscript compiler don't have this knowledge.
 
 # SYSROOT_INC points to a directory that contains all public header files for a
 # given platform.
@@ -108,9 +115,13 @@
 
 clean-installed-binaries::
 
+include $(BUILD_SYSTEM)/gdb.mk
+
 # free the dictionary of LOCAL_MODULE definitions
 $(call modules-clear)
 
+$(call ndk-stl-select,$(NDK_APP_STL))
+
 # now parse the Android.mk for the application, this records all
 # module declarations, but does not populate the dependency graph yet.
 include $(NDK_APP_BUILD_SCRIPT)
@@ -124,13 +135,14 @@
     # has -fsanitize in its ldflags.
     include $(BUILD_SYSTEM)/sanitizers.mk
     include $(BUILD_SYSTEM)/openmp.mk
-    include $(BUILD_SYSTEM)/install_stl.mk
 
     ifneq ($(NDK_APP_WRAP_SH_$(TARGET_ARCH_ABI)),)
         include $(BUILD_SYSTEM)/install_wrap_sh.mk
     endif
 endif
 
+$(call ndk-stl-add-dependencies,$(NDK_APP_STL))
+
 # recompute all dependencies between modules
 $(call modules-compute-dependencies)
 
diff --git a/build/core/stl.mk b/build/core/stl.mk
deleted file mode 100644
index ef1c645..0000000
--- a/build/core/stl.mk
+++ /dev/null
@@ -1,63 +0,0 @@
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Interprets APP_STL to configure default cflags/ldflags and install rules for
-# libc++ runtime libraries as necessary.
-
-ifneq (,$(call module_needs_clangxx,$(LOCAL_MODULE)))
-
-ifeq ($(APP_STL),none)
-LOCAL_CPPFLAGS += -nostdinc++
-LOCAL_LDFLAGS += -nostdlib++
-else ifeq ($(APP_STL),system)
-# TODO: Actually use the system STL headers for that mode or remove.
-#
-# I'm not sure how long this has been broken, but released NDKs do not use the
-# bionic C++ headers when APP_STL=system, they use the libc++ headers. That's
-# almost certainly unintentional.
-#
-# There may not actually be any compatibility issues with this because bionic's
-# libstdc++ only provides new and delete anyway (the rest of the behavior is
-# just headers that re-expose C APIs in the std namespace). We could choose to
-# delete bionic's headers and keep this "bug" instead.
-
-# LOCAL_CPPFLAGS += -stdlib=libstdc++
-# LOCAL_LDFLAGS += -stdlib=libstdc++
-
-# TODO: Remove when https://reviews.llvm.org/D55856 is merged.
-#
-# The system STL Android.mk will export -lstdc++, but the Clang driver will
-# helpfully rewrite -lstdc++ to whatever the default C++ stdlib linker
-# arguments are, except in the presence of -nostdlib and -nodefaultlibs.
-# That part of the driver does not account for -nostdlib++. We can fix the
-# behavior by using -stdlib=libstdc++ so it rewrites -lstdc++ to -lstdc++
-# instead of -lc++.
-LOCAL_LDFLAGS += -stdlib=libstdc++
-
-ifneq (,$(call module-has-c++-features,$(LOCAL_MODULE),rtti exceptions))
-	LOCAL_LDLIBS += -lc++abi
-endif
-
-else ifeq ($(APP_STL),c++_static)
-LOCAL_LDFLAGS += -static-libstdc++
-endif
-
-# Else c++_shared, and no flags are needed. Shared libc++ is the default
-# behavior for Android targets in Clang.
-#
-# Invalid values will be checked by ndk-stl-check.
-
-endif
diff --git a/build/core/toolchains/aarch64-linux-android-clang/setup.mk b/build/core/toolchains/aarch64-linux-android-clang/setup.mk
index 7d1312a..b3ce42c 100644
--- a/build/core/toolchains/aarch64-linux-android-clang/setup.mk
+++ b/build/core/toolchains/aarch64-linux-android-clang/setup.mk
@@ -18,7 +18,6 @@
 
 TARGET_TOOLCHAIN_ARCH_LIB_DIR := aarch64
 TARGET_ASAN_BASENAME := libclang_rt.asan-aarch64-android.so
-TARGET_TSAN_BASENAME := libclang_rt.tsan-aarch64-android.so
 TARGET_UBSAN_BASENAME := libclang_rt.ubsan_standalone-aarch64-android.so
 
 TARGET_CFLAGS := -fpic
diff --git a/build/core/toolchains/arm-linux-androideabi-clang/setup.mk b/build/core/toolchains/arm-linux-androideabi-clang/setup.mk
index ea5342e..61c6430 100644
--- a/build/core/toolchains/arm-linux-androideabi-clang/setup.mk
+++ b/build/core/toolchains/arm-linux-androideabi-clang/setup.mk
@@ -22,6 +22,12 @@
 
 TARGET_CFLAGS := -fpic
 
+# Clang does not set this up properly when using -fno-integrated-as.
+# https://github.com/android-ndk/ndk/issues/906
+TARGET_CFLAGS += -march=armv7-a
+
+TARGET_CFLAGS.no_neon := -mfpu=vfpv3-d16
+
 TARGET_arm_release_CFLAGS := \
     -O2 \
     -DNDEBUG \
@@ -44,6 +50,7 @@
 
 # This function will be called to determine the target CFLAGS used to build
 # a C or Assembler source file, based on its tags.
+#
 TARGET-process-src-files-tags = \
 $(eval __arm_sources := $(call get-src-files-with-tag,arm)) \
 $(eval __thumb_sources := $(call get-src-files-without-tag,arm)) \
@@ -61,5 +68,8 @@
 $(call set-src-files-target-cflags,\
     $(call set_intersection,$(__thumb_sources),$(__release_sources)),\
     $(TARGET_thumb_release_CFLAGS)) \
+$(call add-src-files-target-cflags,\
+    $(call get-src-files-with-tag,no_neon),\
+    $(TARGET_CFLAGS.no_neon)) \
 $(call set-src-files-text,$(__arm_sources),arm) \
 $(call set-src-files-text,$(__thumb_sources),thumb)
diff --git a/build/core/toolchains/riscv64-linux-android-clang/config.mk b/build/core/toolchains/riscv64-linux-android-clang/config.mk
deleted file mode 100644
index cc1b4ce..0000000
--- a/build/core/toolchains/riscv64-linux-android-clang/config.mk
+++ /dev/null
@@ -1,20 +0,0 @@
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# config file for the riscv64 clang toolchain for the Android NDK the real meat
-# is in the setup.mk file adjacent to this one
-#
-TOOLCHAIN_ARCH := riscv64
-TOOLCHAIN_ABIS := riscv64
diff --git a/build/core/toolchains/riscv64-linux-android-clang/setup.mk b/build/core/toolchains/riscv64-linux-android-clang/setup.mk
deleted file mode 100644
index f367e16..0000000
--- a/build/core/toolchains/riscv64-linux-android-clang/setup.mk
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-TOOLCHAIN_NAME := riscv64-linux-android
-LLVM_TRIPLE := riscv64-none-linux-android
-
-TARGET_TOOLCHAIN_ARCH_LIB_DIR := riscv64
-TARGET_ASAN_BASENAME := libclang_rt.asan-riscv64-android.so
-# TODO(https://github.com/android/ndk/issues/1041): Add TSAN when it builds for RISCV64.
-# TARGET_TSAN_BASENAME := libclang_rt.tsan-riscv64-android.so
-TARGET_UBSAN_BASENAME := libclang_rt.ubsan_standalone-riscv64-android.so
-
-TARGET_CFLAGS := -fPIC
-
-TARGET_riscv64_release_CFLAGS := \
-    -O2 \
-    -DNDEBUG \
-
-TARGET_riscv64_debug_CFLAGS := \
-    -O0 \
-    -UNDEBUG \
-    -fno-limit-debug-info \
-
-# This function will be called to determine the target CFLAGS used to build
-# a C or Assembler source file, based on its tags.
-#
-TARGET-process-src-files-tags = \
-$(eval __debug_sources := $(call get-src-files-with-tag,debug)) \
-$(eval __release_sources := $(call get-src-files-without-tag,debug)) \
-$(call set-src-files-target-cflags, $(__debug_sources), $(TARGET_riscv64_debug_CFLAGS)) \
-$(call set-src-files-target-cflags, $(__release_sources),$(TARGET_riscv64_release_CFLAGS)) \
-
-# The ABI-specific sub-directory that the SDK tools recognize for
-# this toolchain's generated binaries
-TARGET_ABI_SUBDIR := riscv64
diff --git a/build/core/toolchains/x86_64-clang/setup.mk b/build/core/toolchains/x86_64-clang/setup.mk
index 759b68e..dbe2d22 100644
--- a/build/core/toolchains/x86_64-clang/setup.mk
+++ b/build/core/toolchains/x86_64-clang/setup.mk
@@ -18,7 +18,6 @@
 
 TARGET_TOOLCHAIN_ARCH_LIB_DIR := x86_64
 TARGET_ASAN_BASENAME := libclang_rt.asan-x86_64-android.so
-TARGET_TSAN_BASENAME := libclang_rt.tsan-x86_64-android.so
 TARGET_UBSAN_BASENAME := libclang_rt.ubsan_standalone-x86_64-android.so
 
 TARGET_CFLAGS := -fPIC
diff --git a/build/dump_compile_commands.py b/build/dump_compile_commands.py
index 38ee90e..c4f8aa8 100644
--- a/build/dump_compile_commands.py
+++ b/build/dump_compile_commands.py
@@ -23,75 +23,67 @@
 import argparse
 import json
 import os
-from pathlib import Path
-
-from shlex import join
 
 
-def get_argument_parser() -> argparse.ArgumentParser:
+def get_argument_parser():
     """Parses and returns command line arguments."""
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
-        "-o",
-        "--output",
-        type=os.path.realpath,  # type: ignore
+        '-o',
+        '--output',
+        type=os.path.realpath,
         required=True,
-        help="Path to output file",
-    )
+        help='Path to output file')
 
     parser.add_argument(
-        "-d",
-        "--directory",
-        type=os.path.realpath,  # type: ignore
-        help="Working directory for the compile command.",
-    )
+        '-d',
+        '--directory',
+        type=os.path.realpath,
+        help='Working directory for the compile command.')
 
-    parser.add_argument("-f", "--file", help="Source file.")
-    parser.add_argument("--object-file", help="Object file.")
+    parser.add_argument('-f', '--file', help='Source file.')
+    parser.add_argument('--object-file', help='Object file.')
 
     parser.add_argument(
-        "--command-file",
-        type=os.path.realpath,  # type: ignore
-        help="Compilation command list file.",
-    )
+        '--command-file',
+        type=os.path.realpath,
+        help='Compilation command list file.')
 
     parser.add_argument(
-        "compile_command",
-        metavar="COMPILE_COMMAND",
+        'compile_command',
+        metavar='COMPILE_COMMAND',
         nargs=argparse.REMAINDER,
-        help="Compilation command.",
-    )
+        help='Compilation command.')
 
     return parser
 
 
-def main() -> None:
+def main():
     """Program entry point."""
     parser = get_argument_parser()
     args = parser.parse_args()
 
     if args.command_file and args.compile_command:
-        parser.error("--command-file and COMPILE_COMMAND are mutually exclusive")
+        parser.error(
+            '--command-file and COMPILE_COMMAND are mutually exclusive')
 
     if not args.command_file and not args.compile_command:
-        parser.error("Either --command-file or COMPILE_COMMAND is required.")
+        parser.error('Either --command-file or COMPILE_COMMAND is required.')
 
-    command = join(args.compile_command)
+    command = ' '.join(args.compile_command)
     if args.command_file:
-        command = Path(args.command_file).read_text(encoding="utf-8").strip()
+        with open(args.command_file) as command_file:
+            command = command_file.read().strip()
 
-    with open(args.output, "w", encoding="utf-8") as out_file:
-        json.dump(
-            {
-                "directory": args.directory,
-                "file": args.file,
-                "output": args.object_file,
-                "command": command,
-            },
-            out_file,
-        )
+    with open(args.output, 'w') as out_file:
+        json.dump({
+            'directory': args.directory,
+            'file': args.file,
+            'output': args.object_file,
+            'command': command,
+        }, out_file)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/build/extract_manifest.py b/build/extract_manifest.py
index 1898562..48dccbd 100644
--- a/build/extract_manifest.py
+++ b/build/extract_manifest.py
@@ -21,33 +21,23 @@
 import xml.etree.ElementTree
 
 
-def parse_args() -> argparse.Namespace:
+def parse_args():
     """Parse and return command line arguments."""
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
-        "property",
-        metavar="PROPERTY",
-        choices=("minSdkVersion", "debuggable"),
-        help="Property to extract from the manifest file.",
-    )
+        'property', metavar='PROPERTY',
+        choices=('minSdkVersion', 'debuggable'),
+        help='Property to extract from the manifest file.')
 
     parser.add_argument(
-        "manifest_file",
-        metavar="MANIFEST_FILE",
-        type=os.path.abspath,  # type: ignore
-        help="Path to the AndroidManifest.xml file.",
-    )
+        'manifest_file', metavar='MANIFEST_FILE', type=os.path.abspath,
+        help='Path to the AndroidManifest.xml file.')
 
     return parser.parse_args()
 
 
-def get_rpath_attribute(
-    root: xml.etree.ElementTree.Element,
-    element_path: str,
-    attribute: str,
-    default: str = "",
-) -> str:
+def get_rpath_attribute(root, element_path, attribute, default=None):
     """Returns the value of an attribute at an rpath.
 
     If more than one element exists with the same name, only the first is
@@ -62,56 +52,57 @@
         The attribute's value as a string if found, else the value of
         `default`.
     """
-    ns_url = "http://schemas.android.com/apk/res/android"
+    ns_url = 'http://schemas.android.com/apk/res/android'
     ns = {
-        "android": ns_url,
+        'android': ns_url,
     }
 
     elem = root.find(element_path, ns)
     if elem is None:
-        return ""
+        return ''
     # ElementTree elements don't have the same helpful namespace parameter that
     # the find family does :(
-    attrib_name = attribute.replace("android:", "{" + ns_url + "}")
-    return str(elem.get(attrib_name, default))
+    attrib_name = attribute.replace('android:', '{' + ns_url + '}')
+    return elem.get(attrib_name, default)
 
 
-def get_minsdkversion(root: xml.etree.ElementTree.Element) -> str:
+def get_minsdkversion(root):
     """Finds and returns the value of android:minSdkVersion in the manifest.
 
     Returns:
         String form of android:minSdkVersion if found, else the empty string.
     """
-    return get_rpath_attribute(root, "./uses-sdk", "android:minSdkVersion", "")
+    return get_rpath_attribute(root, './uses-sdk', 'android:minSdkVersion', '')
 
 
-def get_debuggable(root: xml.etree.ElementTree.Element) -> str:
+def get_debuggable(root):
     """Finds and returns the value of android:debuggable in the manifest.
 
     Returns:
         String form of android:debuggable if found, else the empty string.
     """
-    debuggable = get_rpath_attribute(root, "./application", "android:debuggable", "")
+    debuggable = get_rpath_attribute(
+        root, './application', 'android:debuggable', '')
 
     # Though any such manifest would be invalid, the awk script rewrote bogus
     # values to false. Missing attributes should also be false.
-    if debuggable != "true":
-        debuggable = "false"
+    if debuggable != 'true':
+        debuggable = 'false'
 
     return debuggable
 
 
-def main() -> None:
+def main():
     args = parse_args()
 
     tree = xml.etree.ElementTree.parse(args.manifest_file)
-    if args.property == "minSdkVersion":
+    if args.property == 'minSdkVersion':
         print(get_minsdkversion(tree.getroot()))
-    elif args.property == "debuggable":
+    elif args.property == 'debuggable':
         print(get_debuggable(tree.getroot()))
     else:
         raise ValueError
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/build/extract_platform.py b/build/extract_platform.py
index 3b59176..4a272d6 100644
--- a/build/extract_platform.py
+++ b/build/extract_platform.py
@@ -19,42 +19,38 @@
 import argparse
 import os.path
 import re
-from typing import TextIO
 
 
-def parse_args() -> argparse.Namespace:
+def parse_args():
     """Parse and return command line arguments."""
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
-        "properties_file",
-        metavar="PROPERTIES_FILE",
-        type=os.path.abspath,  # type: ignore
-        help="Path to the project.properties file.",
-    )
+        'properties_file', metavar='PROPERTIES_FILE', type=os.path.abspath,
+        help='Path to the project.properties file.')
 
     return parser.parse_args()
 
 
-def get_platform(properties_file: TextIO) -> str:
+def get_platform(properties_file):
     """Finds and returns the platform version in the properties file.
 
     Returns:
         String form of the platform version if found, else "unknown".
     """
-    android_regex = re.compile(r"(android-\w+)")
-    vendor_regex = re.compile(r":(\d+)\s*$")
+    android_regex = re.compile(r'(android-\w+)')
+    vendor_regex = re.compile(r':(\d+)\s*$')
     for line in properties_file:
         match = android_regex.search(line)
         if match is not None:
             return match.group(1)
         match = vendor_regex.search(line)
         if match is not None:
-            return "android-{}".format(match.group(1))
-    return "unknown"
+            return 'android-{}'.format(match.group(1))
+    return 'unknown'
 
 
-def main() -> None:
+def main():
     args = parse_args()
 
     # Following the comment in the old awk script, we're trying to match:
@@ -69,9 +65,9 @@
     # android- may be followed by either the numeric API level or the named
     # platform. Note that while we can parse any name, ndk-build only support a
     # small handful.
-    with open(args.properties_file, encoding="utf-8") as properties_file:
+    with open(args.properties_file) as properties_file:
         print(get_platform(properties_file))
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/build/gen_compile_db.py b/build/gen_compile_db.py
index 5cf7a90..23e1ecc 100644
--- a/build/gen_compile_db.py
+++ b/build/gen_compile_db.py
@@ -23,59 +23,57 @@
 import argparse
 import json
 import os
-from pathlib import Path
 
 
-def parse_args() -> argparse.Namespace:
+def parse_args():
     """Parses and returns command line arguments."""
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
-        "-o", "--output", type=os.path.realpath, help="Path to output file"
-    )  # type: ignore
+        '-o', '--output', type=os.path.realpath, help='Path to output file')
 
-    def maybe_list_file(arg: str) -> str:
-        if arg.startswith("@"):
-            return "@" + os.path.realpath(arg[1:])
+    def maybe_list_file(arg):
+        if arg.startswith('@'):
+            return '@' + os.path.realpath(arg[1:])
         return os.path.realpath(arg)
 
     parser.add_argument(
-        "command_files",
-        metavar="FILE",
+        'command_files',
+        metavar='FILE',
         type=maybe_list_file,
-        nargs="+",
-        help=(
-            "Path to the compilation database for a single object. If the "
-            "argument begins with @ it will be treated as a list file "
-            "containing paths to the one or more JSON files."
-        ),
-    )
+        nargs='+',
+        help=('Path to the compilation database for a single object. If the '
+              'argument begins with @ it will be treated as a list file '
+              'containing paths to the one or more JSON files.'))
 
     return parser.parse_args()
 
 
-def main() -> None:
+def main():
     """Program entry point."""
     args = parse_args()
 
     all_commands = []
     command_files = []
     for command_file in args.command_files:
-        if command_file.startswith("@"):
-            list_file = Path(command_file[1:])
-            command_files.extend(list_file.read_text(encoding="utf-8").split())
+        if command_file.startswith('@'):
+            with open(command_file[1:]) as list_file:
+                command_files.extend(list_file.read().split())
         else:
             command_files.append(command_file)
 
     for command_file_path in command_files:
-        with open(command_file_path, encoding="utf-8") as command_file:
+        with open(command_file_path) as command_file:
             all_commands.append(json.load(command_file))
 
-    with open(args.output, "w", encoding="utf-8") as out_file:
+    with open(args.output, 'w') as out_file:
         json.dump(
-            all_commands, out_file, sort_keys=True, indent=4, separators=(",", ": ")
-        )
+            all_commands,
+            out_file,
+            sort_keys=True,
+            indent=4,
+            separators=(',', ': '))
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/build/gen_cygpath.py b/build/gen_cygpath.py
index 0100e80..f7f4dd8 100644
--- a/build/gen_cygpath.py
+++ b/build/gen_cygpath.py
@@ -26,7 +26,7 @@
 import sys
 
 
-def get_mounts(mount_output: str) -> list[tuple[str, str]]:
+def get_mounts(mount_output):
     """Parses the output of mount and returns a dict of mounts.
 
     Args:
@@ -35,11 +35,11 @@
     Returns:
         A list of tuples mapping cygwin paths to Windows paths.
     """
-    mount_regex = re.compile(r"^(\S+) on (\S+) .*$")
+    mount_regex = re.compile(r'^(\S+) on (\S+) .*$')
 
     # We use a list of tuples rather than a dict because we want to recurse on
     # the list later anyway.
-    mounts: list[tuple[str, str]] = []
+    mounts = []
     for line in mount_output.splitlines():
         # Cygwin's mount doesn't use backslashes even in Windows paths, so no
         # need to replace here.
@@ -47,16 +47,16 @@
         if match is not None:
             win_path = match.group(1)
             cyg_path = match.group(2)
-            if cyg_path == "/":
+            if cyg_path == '/':
                 # Since we're going to be using patsubst on these, we need to
                 # make sure that the rule for / is applied last, otherwise
                 # we'll replace all other cygwin paths with that one.
                 mounts.insert(0, (cyg_path, win_path))
-            elif cyg_path.startswith("/cygdrive/"):
+            elif cyg_path.startswith('/cygdrive/'):
                 # We need both /cygdrive/c and /cygdrive/C to point to C:.
                 letter = posixpath.basename(cyg_path)
-                lower_path = posixpath.join("/cygdrive", letter.lower())
-                upper_path = posixpath.join("/cygdrive", letter.upper())
+                lower_path = posixpath.join('/cygdrive', letter.lower())
+                upper_path = posixpath.join('/cygdrive', letter.upper())
                 mounts.append((lower_path, win_path))
                 mounts.append((upper_path, win_path))
             else:
@@ -65,7 +65,7 @@
     return mounts
 
 
-def make_cygpath_function(mounts: list[tuple[str, str]]) -> str:
+def make_cygpath_function(mounts):
     """Creates a make function that can be used in place of cygpath.
 
     Args:
@@ -77,21 +77,20 @@
     # We're building a bunch of nested patsubst calls. Once we've written each
     # of the calls, we pass the function input to the inner most call.
     if not mounts:
-        return "$1"
+        return '$1'
 
     cyg_path, win_path = mounts[0]
-    if not cyg_path.endswith("/"):
-        cyg_path += "/"
-    if not win_path.endswith("/"):
-        win_path += "/"
+    if not cyg_path.endswith('/'):
+        cyg_path += '/'
+    if not win_path.endswith('/'):
+        win_path += '/'
 
     other_mounts = mounts[1:]
-    return "$(patsubst {}%,{}%,\n{})".format(
-        cyg_path, win_path, make_cygpath_function(other_mounts)
-    )
+    return '$(patsubst {}%,{}%,\n{})'.format(
+        cyg_path, win_path, make_cygpath_function(other_mounts))
 
 
-def main() -> None:
+def main():
     # We're invoked from make and piped the output of `mount` so we can
     # determine what mappings to make.
     mount_output = sys.stdin.read()
@@ -99,5 +98,5 @@
     print(make_cygpath_function(mounts))
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/build/ldflags_to_sanitizers.py b/build/ldflags_to_sanitizers.py
index e2af032..4a161aa 100644
--- a/build/ldflags_to_sanitizers.py
+++ b/build/ldflags_to_sanitizers.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2018 The Android Open Source Project
 #
@@ -18,23 +18,22 @@
 from __future__ import print_function
 
 import sys
-from typing import TextIO
 
 
-def sanitizers_from_args(args: list[str]) -> list[str]:
+def sanitizers_from_args(args):
     """Returns the sanitizers enabled by a given set of ldflags."""
     sanitizers = set()
     for arg in args:
-        if arg.startswith("-fsanitize="):
-            sanitizer_list = arg.partition("=")[2]
-            sanitizers |= set(sanitizer_list.split(","))
-        elif arg.startswith("-fno-sanitize="):
-            sanitizer_list = arg.partition("=")[2]
-            sanitizers -= set(sanitizer_list.split(","))
+        if arg.startswith('-fsanitize='):
+            sanitizer_list = arg.partition('=')[2]
+            sanitizers |= set(sanitizer_list.split(','))
+        elif arg.startswith('-fno-sanitize='):
+            sanitizer_list = arg.partition('=')[2]
+            sanitizers -= set(sanitizer_list.split(','))
     return sorted(list(sanitizers))
 
 
-def argv_to_module_arg_lists(args: list[str]) -> tuple[list[str], list[list[str]]]:
+def argv_to_module_arg_lists(args):
     """Converts module ldflags from argv format to per-module lists.
 
     Flags are passed to us in the following format:
@@ -44,32 +43,31 @@
     per-module lists, i.e.:
         ['global flag'], [['flag1', 'flag2'], ['flag1', 'flag3']]
     """
-    modules: list[list[str]] = [[]]
+    modules = [[]]
     for arg in args:
-        if arg == "--module":
+        if arg == '--module':
             modules.append([])
         else:
             modules[-1].append(arg)
     return modules[0], modules[1:]
 
 
-def main(argv: list[str], stream: TextIO = sys.stdout) -> None:
+def main(argv, stream=sys.stdout):
     """Program entry point."""
     # The only args we're guaranteed to see are the program name and at least
     # one --module. GLOBAL_FLAGS might be empty, as might any of the
     # MODULE_FLAGS sections.
     if len(argv) < 2:
         sys.exit(
-            "usage: ldflags_to_sanitizers.py [GLOBAL_FLAGS] "
-            "--module [MODULE_FLAGS] [--module [MODULE_FLAGS]...]"
-        )
+            'usage: ldflags_to_sanitizers.py [GLOBAL_FLAGS] '
+            '--module [MODULE_FLAGS] [--module [MODULE_FLAGS]...]')
 
     global_flags, modules_flags = argv_to_module_arg_lists(argv[1:])
     all_sanitizers = list(sanitizers_from_args(global_flags))
     for module_flags in modules_flags:
         all_sanitizers.extend(sanitizers_from_args(module_flags))
-    print(" ".join(sorted(set(all_sanitizers))), file=stream)
+    print(' '.join(sorted(set(all_sanitizers))), file=stream)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main(sys.argv)
diff --git a/ndk/test/buildtest/__init__.py b/build/lib/__init__.py
similarity index 100%
rename from ndk/test/buildtest/__init__.py
rename to build/lib/__init__.py
diff --git a/build/lib/build_support.py b/build/lib/build_support.py
new file mode 100644
index 0000000..e6c7dd9
--- /dev/null
+++ b/build/lib/build_support.py
@@ -0,0 +1,148 @@
+#
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import argparse
+import multiprocessing
+import os
+import site
+import subprocess
+import sys
+
+
+# "build" is not a valid package name for setuptools. This package will be
+# silently removed from the source distribution because setuptools thinks it's
+# the build directory rather than a python package named build. Pieces of this
+# package are being moved into the ndk package where they belong, but will
+# continue to be exported from here until we can chase down all the other
+# users.
+THIS_DIR = os.path.realpath(os.path.dirname(__file__))
+site.addsitedir(os.path.join(THIS_DIR, '../..'))
+
+# pylint: disable=wrong-import-position,unused-import
+from ndk.abis import (
+    ALL_ABIS,
+    ALL_ARCHITECTURES,
+    ALL_TOOLCHAINS,
+    ALL_TRIPLES,
+    LP32_ABIS,
+    LP64_ABIS,
+    arch_to_abis,
+    arch_to_toolchain,
+    arch_to_triple,
+    toolchain_to_arch,
+)
+
+from ndk.hosts import Host, get_default_host, host_to_tag
+
+from ndk.paths import (
+    android_path,
+    get_dist_dir,
+    get_out_dir,
+    ndk_path,
+    sysroot_path,
+    toolchain_path,
+)
+# pylint: enable=wrong-import-position,unused-import
+
+
+def minimum_platform_level(abi):
+    import ndk.abis
+    return ndk.abis.min_api_for_abi(abi)
+
+
+def jobs_arg():
+    return '-j{}'.format(multiprocessing.cpu_count() * 2)
+
+
+def build(cmd, args, intermediate_package=False):
+    package_dir = args.out_dir if intermediate_package else args.dist_dir
+    common_args = [
+        '--verbose',
+        '--package-dir={}'.format(package_dir),
+    ]
+
+    build_env = dict(os.environ)
+    build_env['NDK_BUILDTOOLS_PATH'] = android_path('ndk/build/tools')
+    build_env['ANDROID_NDK_ROOT'] = ndk_path()
+    subprocess.check_call(cmd + common_args, env=build_env)
+
+
+def make_package(name, directory, out_dir):
+    """Pacakges an NDK module for release.
+
+    Makes a zipfile of the single NDK module that can be released in the SDK
+    manager.
+
+    Args:
+        name: Name of the final package, excluding extension.
+        directory: Directory to be packaged.
+        out_dir: Directory to place package.
+    """
+    if not os.path.isdir(directory):
+        raise ValueError('directory must be a directory: ' + directory)
+
+    path = os.path.join(out_dir, name + '.zip')
+    if os.path.exists(path):
+        os.unlink(path)
+
+    cwd = os.getcwd()
+    os.chdir(os.path.dirname(directory))
+    basename = os.path.basename(directory)
+    try:
+        subprocess.check_call(
+            ['zip', '-x', '*.pyc', '-x', '*.pyo', '-x', '*.swp', '-x',
+             '*.git*', '-0qr', path, basename])
+    finally:
+        os.chdir(cwd)
+
+
+class ArgParser(argparse.ArgumentParser):
+    def __init__(self):
+        super(ArgParser, self).__init__()
+
+        self.add_argument(
+            '--host',
+            choices=Host,
+            type=Host,
+            default=get_default_host(),
+            help='Build binaries for given OS (e.g. linux).')
+
+        self.add_argument(
+            '--out-dir', help='Directory to place temporary build files.',
+            type=os.path.realpath, default=get_out_dir())
+
+        # The default for --dist-dir has to be handled after parsing all
+        # arguments because the default is derived from --out-dir. This is
+        # handled in run().
+        self.add_argument(
+            '--dist-dir', help='Directory to place the packaged artifact.',
+            type=os.path.realpath)
+
+
+def run(main_func, arg_parser=ArgParser):
+    if 'ANDROID_BUILD_TOP' not in os.environ:
+        top = os.path.join(os.path.dirname(__file__), '../../..')
+        os.environ['ANDROID_BUILD_TOP'] = os.path.realpath(top)
+
+    args = arg_parser().parse_args()
+
+    if args.dist_dir is None:
+        args.dist_dir = get_dist_dir(args.out_dir)
+
+    # We want any paths to be relative to the invoked build script.
+    main_filename = os.path.realpath(sys.modules['__main__'].__file__)
+    os.chdir(os.path.dirname(main_filename))
+
+    main_func(args)
diff --git a/build/ndk-build b/build/ndk-build
index 845b473..85d10e6 100755
--- a/build/ndk-build
+++ b/build/ndk-build
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/bash
 #
 # Copyright (C) 2010 The Android Open Source Project
 #
@@ -37,8 +37,8 @@
 #    cd ndk
 #    ./ndk-build -C <project-path>
 #
-PROGDIR=$(dirname "$0")
-PROGDIR=$(cd "$PROGDIR" && pwd -P)
+PROGDIR=`dirname $0`
+PROGDIR=`cd $PROGDIR && pwd -P`
 ANDROID_NDK_ROOT=$PROGDIR/..
 
 # Unset PYTHONPATH and PYTHONHOME to prevent the user's environment from
@@ -49,7 +49,7 @@
 
 # Check if absolute NDK path contain space
 #
-case "$PROGDIR" in
+case $PROGDIR in
     *\ *) echo "ERROR: NDK path cannot contain space"
           exit 1
         ;;
@@ -67,8 +67,9 @@
 
 PROJECT_PATH=
 PROJECT_PATH_NEXT=
+NDK_ANALYZER_OUT=
 for opt; do
-    if [ -z "$PROJECT_PATH" ] && [ "$PROJECT_PATH_NEXT" = "yes" ] ; then
+    if [ -z "$PROJECT_PATH" -a "$PROJECT_PATH_NEXT" = "yes" ] ; then
         PROJECT_PATH=$opt
         PROJECT_PATH_NEXT=
     else
@@ -85,6 +86,12 @@
           NDK_ANALYZE=*)
             NDK_ANALYZE=0
             ;;
+          NDK_ANALYZER_OUT=*)
+            NDK_ANALYZER_OUT=${opt#NDK_ANALYZER_OUT=}
+            ;;
+          APP_ABI=*)
+            APP_ABI=${opt#APP_ABI=}
+            ;;
           -C)
             PROJECT_PATH_NEXT="yes"
             ;;
@@ -111,14 +118,65 @@
 fi
 
 # Detect host operating system and architecture
-. "$ANDROID_NDK_ROOT/build/tools/ndk_bin_common.sh"
+# The 64-bit / 32-bit distinction gets tricky on Linux and Darwin because
+# uname -m returns the kernel's bit size, and it's possible to run with
+# a 64-bit kernel and a 32-bit userland.
+#
+HOST_OS=$(uname -s)
+case $HOST_OS in
+  Darwin) HOST_OS=darwin;;
+  Linux) HOST_OS=linux;;
+  FreeBsd) HOST_OS=freebsd;;
+  CYGWIN*|*_NT-*) HOST_OS=cygwin;;
+  *) echo "ERROR: Unknown host operating system: $HOST_OS"
+     exit 1
+esac
 log "HOST_OS=$HOST_OS"
+
+HOST_ARCH=$(uname -m)
+case $HOST_ARCH in
+    i?86) HOST_ARCH=x86;;
+    x86_64|amd64) HOST_ARCH=x86_64;;
+    *) echo "ERROR: Unknown host CPU architecture: $HOST_ARCH"
+       exit 1
+esac
 log "HOST_ARCH=$HOST_ARCH"
-log "HOST_TAG=$HOST_TAG"
+
+# Detect 32-bit userland on 64-bit kernels
+HOST_TAG="$HOST_OS-$HOST_ARCH"
+case $HOST_TAG in
+  linux-x86_64|darwin-x86_64)
+    # we look for x86_64 or x86-64 in the output of 'file' for our shell
+    # the -L flag is used to dereference symlinks, just in case.
+    file -L "$SHELL" | grep -q "x86[_-]64"
+    if [ $? != 0 ]; then
+      HOST_ARCH=x86
+      log "HOST_ARCH=$HOST_ARCH (32-bit userland detected)"
+    fi
+    ;;
+esac
+
+# Check that we have 64-bit binaries on 64-bit system, otherwise fallback
+# on 32-bit ones. This gives us more freedom in packaging the NDK.
+LOG_MESSAGE=
+if [ $HOST_ARCH = x86_64 ]; then
+  if [ ! -d $ANDROID_NDK_ROOT/prebuilt/$HOST_TAG ]; then
+    HOST_ARCH=x86
+    LOG_MESSAGE="(no 64-bit prebuilt binaries detected)"
+  fi
+fi
+
+HOST_TAG=$HOST_OS-$HOST_ARCH
+# Special case windows-x86 -> windows
+if [ $HOST_TAG = windows-x86 ]; then
+  HOST_TAG=windows
+fi
+log "HOST_TAG=$HOST_TAG $LOG_MESSAGE"
 
 # If GNUMAKE is defined, check that it points to a valid file
 if [ -n "$GNUMAKE" ] ; then
-    if ! ABS_GNUMAKE=$(which "$GNUMAKE" 2> /dev/null); then
+    ABS_GNUMAKE=`which $GNUMAKE 2> /dev/null`
+    if [ $? != 0 ] ; then
         echo "ERROR: Your GNUMAKE variable is defined to an invalid name: $GNUMAKE"
         echo "Please fix it to point to a valid make executable (e.g. /usr/bin/make)"
         exit 1
@@ -132,7 +190,8 @@
     GNUMAKE=$ANDROID_NDK_ROOT/prebuilt/$HOST_TAG/bin/make
     if [ ! -f "$GNUMAKE" ]; then
         # Otherwise, use 'make' and check that it is available
-        if ! GNUMAKE=$(which make 2> /dev/null); then
+        GNUMAKE=`which make 2> /dev/null`
+        if [ $? != 0 ] ; then
             echo "ERROR: Cannot find 'make' program. Please install Cygwin make package"
             echo "or define the GNUMAKE variable to point to it."
             exit 1
@@ -149,11 +208,12 @@
 # 'make' program in their PATH.
 #
 if [ "$OSTYPE" = "cygwin" ] ; then
-    GNUMAKE=$(cygpath -u "$GNUMAKE")
-    PROGDIR_MIXED=$(cygpath -m "$PROGDIR")
-    if ! ("$GNUMAKE" -f "$PROGDIR_MIXED/core/check-cygwin-make.mk" >/dev/null 2>&1); then
+    GNUMAKE=`cygpath -u $GNUMAKE`
+    PROGDIR_MIXED=`cygpath -m $PROGDIR`
+    CYGWIN_GNUMAKE=`$GNUMAKE -f "$PROGDIR_MIXED/core/check-cygwin-make.mk" 2>&1`
+    if [ $? != 0 ] ; then
         echo "ERROR: You are using a non-Cygwin compatible Make program."
-        echo "Currently using: $(cygpath -m "$GNUMAKE")"
+        echo "Currently using: `cygpath -m $GNUMAKE`"
         echo ""
         echo "To solve the issue, follow these steps:"
         echo ""
@@ -171,11 +231,58 @@
     log "Cygwin-compatible GNU make detected"
 fi
 
-NDK_ANALYZER_FLAGS=
 if [ "$NDK_ANALYZE" = 1 ]; then
-    # Continue supporting the old interface to the static analyzer. clang-tidy
-    # does all the same checks by default (and some new ones).
-    NDK_ANALYZER_FLAGS=APP_CLANG_TIDY=true
-fi
+    . $PROGDIR/tools/dev-defaults.sh  # for DEFAULT_LLVM_VERSION
 
-"$GNUMAKE" -O -f "$PROGDIR/core/build-local.mk" $NDK_ANALYZER_FLAGS "$@"
+    # Return flags send in env. or command line which are enough to retrive APP_ABI and TOOLCHAIN_PREFIX later
+    gen_flags ()
+    {
+        local FLAGS=
+
+        if [ -n "$PROJECT_PATH" ] ; then
+            FLAGS=$FLAGS" -C $PROJECT_PATH"
+        fi
+        if [ -n "$APP_ABI" ] ; then
+            FLAGS=$FLAGS" APP_ABI=$APP_ABI"
+        fi
+        echo "$FLAGS"
+    }
+
+    get_build_var ()
+    {
+        local VAR=$1
+        local FLAGS=`gen_flags`
+        $GNUMAKE --no-print-dir -f $PROGDIR/core/build-local.mk $FLAGS DUMP_${VAR} | tail -1
+    }
+
+    get_build_var_for_abi ()
+    {
+        local VAR=$1
+        local ABI=$2
+        local FLAGS=`gen_flags`
+        $GNUMAKE --no-print-dir -f $PROGDIR/core/build-local.mk $FLAGS DUMP_${VAR} APP_ABI=${ABI} | tail -1
+    }
+
+    APP_ABIS=`get_build_var APP_ABI`
+    for ABI in $APP_ABIS; do
+        LLVM_TOOLCHAIN_PREFIX=`get_build_var LLVM_TOOLCHAIN_PREFIX`
+        ANALYZER_OUT=`get_build_var NDK_APP_ANALYZER_OUT`
+
+        ANALYZER_CC=${LLVM_TOOLCHAIN_PREFIX}clang
+        ANALYZER_CXX=${LLVM_TOOLCHAIN_PREFIX}clang++
+
+        ANALYZER_OUT_FLAG=
+        if [ -n "$NDK_ANALYZER_OUT" ]; then
+            ANALYZER_OUT_FLAG="-o $NDK_ANALYZER_OUT/$ABI"
+        fi
+
+        perl ${LLVM_TOOLCHAIN_PREFIX}scan-build \
+            --use-cc $ANALYZER_CC \
+            --use-c++ $ANALYZER_CXX \
+            --status-bugs \
+            $ANALYZER_OUT_FLAG \
+            $GNUMAKE -f $PROGDIR/core/build-local.mk "$@" APP_ABI=$ABI
+    done
+else
+    $GNUMAKE -O -f $PROGDIR/core/build-local.mk "$@"
+fi
diff --git a/build/ndk-build.cmd b/build/ndk-build.cmd
index d45c5f9..c7e86b1 100755
--- a/build/ndk-build.cmd
+++ b/build/ndk-build.cmd
@@ -1,21 +1,9 @@
 @echo off
-setlocal
-
-rem This is checked in build-local.mk... but make on windows doesn't handle
-rem LAST_MAKEFILE correctly when the makefile is in a directory with spaces
-rem anyway, so that defense doesn't work either.
-rem https://github.com/android/ndk/issues/1400
-rem https://stackoverflow.com/a/29057742/632035
-for /f "tokens=2" %%a in ("%~dp0") do (
-    echo ERROR: NDK path cannot contain spaces
-    exit /b 1
-)
-
 rem Unset PYTHONPATH and PYTHONHOME to prevent the user's environment from
 rem affecting the Python that we invoke.
 rem See https://github.com/googlesamples/vulkan-basic-samples/issues/25
 set PYTHONHOME=
 set PYTHONPATH=
-set NDK_ROOT=%~dp0..
+set NDK_ROOT=%~dp0\..
 set PREBUILT_PATH=%NDK_ROOT%\prebuilt\windows-x86_64
 "%PREBUILT_PATH%\bin\make.exe" -O -f "%NDK_ROOT%\build\core\build-local.mk" SHELL=cmd %*
diff --git a/build/test_extract_manifest.py b/build/test_extract_manifest.py
index 70652c1..f55a349 100644
--- a/build/test_extract_manifest.py
+++ b/build/test_extract_manifest.py
@@ -23,9 +23,8 @@
 
 
 class ExtractMinSdkVersionTest(unittest.TestCase):
-    def testMinSdkVersion(self) -> None:
-        xml_str = textwrap.dedent(
-            """\
+    def testMinSdkVersion(self):
+        xml_str = textwrap.dedent("""\
             <?xml version="1.0" encoding="utf-8"?>
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
@@ -42,15 +41,14 @@
               </application>
               <uses-sdk android:minSdkVersion="9"/>
             </manifest>
-            """
-        )
+            """)
         root = xml.etree.ElementTree.fromstring(xml_str)
 
-        self.assertEqual("9", build.extract_manifest.get_minsdkversion(root))
+        self.assertEqual(
+            '9', build.extract_manifest.get_minsdkversion(root))
 
-    def testUsesSdkMissingMinSdkVersion(self) -> None:
-        xml_str = textwrap.dedent(
-            """\
+    def testUsesSdkMissingMinSdkVersion(self):
+        xml_str = textwrap.dedent("""\
             <?xml version="1.0" encoding="utf-8"?>
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
@@ -67,15 +65,14 @@
               </application>
               <uses-sdk android:maxSdkVersion="21"/>
             </manifest>
-            """
-        )
+            """)
         root = xml.etree.ElementTree.fromstring(xml_str)
 
-        self.assertEqual("", build.extract_manifest.get_minsdkversion(root))
+        self.assertEqual(
+            '', build.extract_manifest.get_minsdkversion(root))
 
-    def testNoUsesSdk(self) -> None:
-        xml_str = textwrap.dedent(
-            """\
+    def testNoUsesSdk(self):
+        xml_str = textwrap.dedent("""\
             <?xml version="1.0" encoding="utf-8"?>
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
@@ -91,17 +88,16 @@
                 </activity>
               </application>
             </manifest>
-            """
-        )
+            """)
         root = xml.etree.ElementTree.fromstring(xml_str)
 
-        self.assertEqual("", build.extract_manifest.get_minsdkversion(root))
+        self.assertEqual(
+            '', build.extract_manifest.get_minsdkversion(root))
 
 
 class ExtractDebuggableTest(unittest.TestCase):
-    def testIsDebuggable(self) -> None:
-        xml_str = textwrap.dedent(
-            """\
+    def testIsDebuggable(self):
+        xml_str = textwrap.dedent("""\
             <?xml version="1.0" encoding="utf-8"?>
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
@@ -113,15 +109,14 @@
                 android:debuggable="true">
               </application>
             </manifest>
-            """
-        )
+            """)
         root = xml.etree.ElementTree.fromstring(xml_str)
 
-        self.assertEqual("true", build.extract_manifest.get_debuggable(root))
+        self.assertEqual(
+            'true', build.extract_manifest.get_debuggable(root))
 
-    def testIsNotDebuggable(self) -> None:
-        xml_str = textwrap.dedent(
-            """\
+    def testIsNotDebuggable(self):
+        xml_str = textwrap.dedent("""\
             <?xml version="1.0" encoding="utf-8"?>
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
@@ -133,15 +128,14 @@
                 android:debuggable="false">
               </application>
             </manifest>
-            """
-        )
+            """)
         root = xml.etree.ElementTree.fromstring(xml_str)
 
-        self.assertEqual("false", build.extract_manifest.get_debuggable(root))
+        self.assertEqual(
+            'false', build.extract_manifest.get_debuggable(root))
 
-    def testBogusValue(self) -> None:
-        xml_str = textwrap.dedent(
-            """\
+    def testBogusValue(self):
+        xml_str = textwrap.dedent("""\
             <?xml version="1.0" encoding="utf-8"?>
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
@@ -153,15 +147,14 @@
                 android:debuggable="bogus">
               </application>
             </manifest>
-            """
-        )
+            """)
         root = xml.etree.ElementTree.fromstring(xml_str)
 
-        self.assertEqual("false", build.extract_manifest.get_debuggable(root))
+        self.assertEqual(
+            'false', build.extract_manifest.get_debuggable(root))
 
-    def testNotSet(self) -> None:
-        xml_str = textwrap.dedent(
-            """\
+    def testNotSet(self):
+        xml_str = textwrap.dedent("""\
             <?xml version="1.0" encoding="utf-8"?>
             <manifest
                 xmlns:android="http://schemas.android.com/apk/res/android"
@@ -176,8 +169,8 @@
               </application>
               <uses-sdk android:maxSdkVersion="21"/>
             </manifest>
-            """
-        )
+            """)
         root = xml.etree.ElementTree.fromstring(xml_str)
 
-        self.assertEqual("false", build.extract_manifest.get_debuggable(root))
+        self.assertEqual(
+            'false', build.extract_manifest.get_debuggable(root))
diff --git a/build/test_extract_platform.py b/build/test_extract_platform.py
index 08b0c2e..4f81670 100644
--- a/build/test_extract_platform.py
+++ b/build/test_extract_platform.py
@@ -14,7 +14,6 @@
 # limitations under the License.
 #
 from __future__ import print_function
-from io import StringIO
 
 import textwrap
 import unittest
@@ -23,52 +22,41 @@
 
 
 class ExtractPlatformTest(unittest.TestCase):
-    def testNumericVersion(self) -> None:
-        props_file = StringIO(
-            textwrap.dedent(
-                """\
+    def testNumericVersion(self):
+        props_file = textwrap.dedent("""\
             some
             # other
             junk
             target=android-9
             foo
-            """
-            )
-        )
+            """).splitlines()
 
-        self.assertEqual("android-9", build.extract_platform.get_platform(props_file))
+        self.assertEqual(
+            'android-9', build.extract_platform.get_platform(props_file))
 
-    def testNamedVersion(self) -> None:
-        props_file = StringIO(
-            textwrap.dedent(
-                """\
+    def testNamedVersion(self):
+        props_file = textwrap.dedent("""\
             some
             # other
             junk
             target=android-nougat
             foo
-            """
-            )
-        )
+            """).splitlines()
 
         self.assertEqual(
-            "android-nougat", build.extract_platform.get_platform(props_file)
-        )
+            'android-nougat', build.extract_platform.get_platform(props_file))
 
-    def testVendorVersion(self) -> None:
-        props_file = StringIO(
-            textwrap.dedent(
-                """\
+    def testVendorVersion(self):
+        props_file = textwrap.dedent("""\
             some
             # other
             junk
             target=vendor:something:21
             foo
-            """
-            )
-        )
+            """).splitlines()
 
-        self.assertEqual("android-21", build.extract_platform.get_platform(props_file))
+        self.assertEqual(
+            'android-21', build.extract_platform.get_platform(props_file))
 
-    def testNoVersion(self) -> None:
-        self.assertEqual("unknown", build.extract_platform.get_platform(StringIO("")))
+    def testNoVersion(self):
+        self.assertEqual('unknown', build.extract_platform.get_platform([]))
diff --git a/build/test_gen_cygpath.py b/build/test_gen_cygpath.py
index 367c351..df36048 100644
--- a/build/test_gen_cygpath.py
+++ b/build/test_gen_cygpath.py
@@ -22,67 +22,64 @@
 
 
 class GetMountsTest(unittest.TestCase):
-    def testSingleMount(self) -> None:
-        mount_output = "C:/cygwin on / type ntfs (binary,auto)"
+    def testSingleMount(self):
+        mount_output = 'C:/cygwin on / type ntfs (binary,auto)'
         self.assertEqual(
-            [("/", "C:/cygwin")], build.gen_cygpath.get_mounts(mount_output)
-        )
+            [('/', 'C:/cygwin')], build.gen_cygpath.get_mounts(mount_output))
 
-    def testCaseInsensitiveMount(self) -> None:
-        mount_output = "C: on /cygdrive/c type ntfs"
+    def testCaseInsensitiveMount(self):
+        mount_output = 'C: on /cygdrive/c type ntfs'
         expected_output = [
-            ("/cygdrive/c", "C:"),
-            ("/cygdrive/C", "C:"),
+            ('/cygdrive/c', 'C:'),
+            ('/cygdrive/C', 'C:'),
         ]
 
-        self.assertEqual(expected_output, build.gen_cygpath.get_mounts(mount_output))
+        self.assertEqual(
+            expected_output, build.gen_cygpath.get_mounts(mount_output))
 
-    def testManyMounts(self) -> None:
-        mount_output = textwrap.dedent(
-            """\
+    def testManyMounts(self):
+        mount_output = textwrap.dedent("""\
             C:/cygwin/bin on /usr/bin type ntfs (binary,auto)
             C:/cygwin/lib on /usr/lib type ntfs (binary,auto)
             C:/cygwin on / type ntfs (binary,auto)
             C: on /cygdrive/c type ntfs (binary,posix=0,user,noumount,auto)
             D: on /cygdrive/d type udf (binary,posix=0,user,noumount,auto)
-            """
-        )
+            """)
 
         expected_output = [
-            ("/", "C:/cygwin"),
-            ("/usr/bin", "C:/cygwin/bin"),
-            ("/usr/lib", "C:/cygwin/lib"),
-            ("/cygdrive/c", "C:"),
-            ("/cygdrive/C", "C:"),
-            ("/cygdrive/d", "D:"),
-            ("/cygdrive/D", "D:"),
+            ('/', 'C:/cygwin'),
+            ('/usr/bin', 'C:/cygwin/bin'),
+            ('/usr/lib', 'C:/cygwin/lib'),
+            ('/cygdrive/c', 'C:'),
+            ('/cygdrive/C', 'C:'),
+            ('/cygdrive/d', 'D:'),
+            ('/cygdrive/D', 'D:'),
         ]
 
-        self.assertEqual(expected_output, build.gen_cygpath.get_mounts(mount_output))
+        self.assertEqual(
+            expected_output, build.gen_cygpath.get_mounts(mount_output))
 
 
 class MakeCygpathFunctionTest(unittest.TestCase):
-    def testSingleMount(self) -> None:
-        mounts = [("/", "C:/cygwin")]
-        expected_output = "$(patsubst /%,C:/cygwin/%,\n$1)"
+    def testSingleMount(self):
+        mounts = [('/', 'C:/cygwin')]
+        expected_output = '$(patsubst /%,C:/cygwin/%,\n$1)'
 
         self.assertEqual(
-            expected_output, build.gen_cygpath.make_cygpath_function(mounts)
-        )
+            expected_output, build.gen_cygpath.make_cygpath_function(mounts))
 
-    def testManyMounts(self) -> None:
+    def testManyMounts(self):
         mounts = [
-            ("/", "C:/cygwin"),
-            ("/usr/bin", "C:/cygwin/bin"),
-            ("/usr/lib", "C:/cygwin/lib"),
-            ("/cygdrive/c", "C:"),
-            ("/cygdrive/C", "C:"),
-            ("/cygdrive/d", "D:"),
-            ("/cygdrive/D", "D:"),
+            ('/', 'C:/cygwin'),
+            ('/usr/bin', 'C:/cygwin/bin'),
+            ('/usr/lib', 'C:/cygwin/lib'),
+            ('/cygdrive/c', 'C:'),
+            ('/cygdrive/C', 'C:'),
+            ('/cygdrive/d', 'D:'),
+            ('/cygdrive/D', 'D:'),
         ]
 
-        expected_output = textwrap.dedent(
-            """\
+        expected_output = textwrap.dedent("""\
             $(patsubst /%,C:/cygwin/%,
             $(patsubst /usr/bin/%,C:/cygwin/bin/%,
             $(patsubst /usr/lib/%,C:/cygwin/lib/%,
@@ -90,9 +87,7 @@
             $(patsubst /cygdrive/C/%,C:/%,
             $(patsubst /cygdrive/d/%,D:/%,
             $(patsubst /cygdrive/D/%,D:/%,
-            $1)))))))"""
-        )
+            $1)))))))""")
 
         self.assertEqual(
-            expected_output, build.gen_cygpath.make_cygpath_function(mounts)
-        )
+            expected_output, build.gen_cygpath.make_cygpath_function(mounts))
diff --git a/build/test_ldflags_to_sanitizers.py b/build/test_ldflags_to_sanitizers.py
index 1e0d91a..db195ea 100644
--- a/build/test_ldflags_to_sanitizers.py
+++ b/build/test_ldflags_to_sanitizers.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2018 The Android Open Source Project
 #
@@ -19,7 +19,7 @@
 import unittest
 
 try:
-    from StringIO import StringIO  # type: ignore
+    from StringIO import StringIO
 except ImportError:
     from io import StringIO
 
@@ -29,108 +29,76 @@
 
 
 class LdflagsToSanitizersTest(unittest.TestCase):
-    def test_sanitizers_from_args_no_sanitize_args(self) -> None:
+    def test_sanitizers_from_args_no_sanitize_args(self):
         """Tests that we don't identify sanitizers when there are none."""
         self.assertListEqual([], sanitizers_from_args([]))
-        self.assertListEqual([], sanitizers_from_args(["foo", "bar"]))
+        self.assertListEqual([], sanitizers_from_args(['foo', 'bar']))
 
-    def test_sanitizers_from_args_enabled_sanitizers(self) -> None:
+    def test_sanitizers_from_args_enabled_sanitizers(self):
         """Tests that we find enabled sanitizers."""
-        self.assertListEqual(["address"], sanitizers_from_args(["-fsanitize=address"]))
         self.assertListEqual(
-            ["address"], sanitizers_from_args(["-fsanitize=address", "foo"])
-        )
+            ['address'], sanitizers_from_args(['-fsanitize=address']))
         self.assertListEqual(
-            ["address", "undefined"],
-            sanitizers_from_args(["-fsanitize=address", "-fsanitize=undefined"]),
-        )
+            ['address'], sanitizers_from_args(['-fsanitize=address', 'foo']))
         self.assertListEqual(
-            ["address", "undefined"],
-            sanitizers_from_args(["-fsanitize=address,undefined"]),
-        )
+            ['address', 'undefined'],
+            sanitizers_from_args(
+                ['-fsanitize=address', '-fsanitize=undefined']))
         self.assertListEqual(
-            ["address", "undefined"],
-            sanitizers_from_args(["-fsanitize=address,undefined", "foo"]),
-        )
+            ['address', 'undefined'],
+            sanitizers_from_args(['-fsanitize=address,undefined']))
+        self.assertListEqual(
+            ['address', 'undefined'],
+            sanitizers_from_args(['-fsanitize=address,undefined', 'foo']))
 
-    def test_sanitizers_from_args_disabled_sanitizers(self) -> None:
+    def test_sanitizers_from_args_disabled_sanitizers(self):
         """Tests that we don't find disabled sanitizers."""
-        self.assertListEqual([], sanitizers_from_args(["-fno-sanitize=address"]))
-        self.assertListEqual([], sanitizers_from_args(["-fno-sanitize=address", "foo"]))
-        self.assertListEqual(
-            [],
-            sanitizers_from_args(["-fno-sanitize=address", "-fno-sanitize=undefined"]),
-        )
-        self.assertListEqual(
-            [], sanitizers_from_args(["-fno-sanitize=address,undefined"])
-        )
-        self.assertListEqual(
-            [], sanitizers_from_args(["-fno-sanitize=address,undefined", "foo"])
-        )
+        self.assertListEqual([], sanitizers_from_args(
+            ['-fno-sanitize=address']))
+        self.assertListEqual([], sanitizers_from_args(
+            ['-fno-sanitize=address', 'foo']))
+        self.assertListEqual([], sanitizers_from_args(
+            ['-fno-sanitize=address', '-fno-sanitize=undefined']))
+        self.assertListEqual([], sanitizers_from_args(
+            ['-fno-sanitize=address,undefined']))
+        self.assertListEqual([], sanitizers_from_args(
+            ['-fno-sanitize=address,undefined', 'foo']))
 
-    def test_sanitizers_from_args_enabled_disabled_sanitizers(self) -> None:
+    def test_sanitizers_from_args_enabled_disabled_sanitizers(self):
         """Tests that we correctly identify only enabled sanitizers."""
-        self.assertListEqual(
-            [], sanitizers_from_args(["-fsanitize=address", "-fno-sanitize=address"])
-        )
-        self.assertListEqual(
-            ["address"],
-            sanitizers_from_args(
-                ["-fsanitize=address", "-fno-sanitize=address", "-fsanitize=address"]
-            ),
-        )
-        self.assertListEqual(
-            [],
-            sanitizers_from_args(
-                [
-                    "-fsanitize=address",
-                    "-fno-sanitize=address",
-                    "-fsanitize=address",
-                    "-fno-sanitize=address",
-                ]
-            ),
-        )
-        self.assertListEqual(
-            ["undefined"],
-            sanitizers_from_args(
-                ["-fsanitize=address,undefined", "-fno-sanitize=address"]
-            ),
-        )
-        self.assertListEqual(
-            ["undefined"],
-            sanitizers_from_args(
-                ["-fsanitize=address", "-fsanitize=undefined", "-fno-sanitize=address"]
-            ),
-        )
+        self.assertListEqual([], sanitizers_from_args(
+            ['-fsanitize=address', '-fno-sanitize=address']))
+        self.assertListEqual(['address'], sanitizers_from_args(
+            ['-fsanitize=address', '-fno-sanitize=address',
+             '-fsanitize=address']))
+        self.assertListEqual([], sanitizers_from_args(
+            ['-fsanitize=address', '-fno-sanitize=address',
+             '-fsanitize=address', '-fno-sanitize=address']))
+        self.assertListEqual(['undefined'], sanitizers_from_args(
+            ['-fsanitize=address,undefined', '-fno-sanitize=address']))
+        self.assertListEqual(['undefined'], sanitizers_from_args(
+            ['-fsanitize=address', '-fsanitize=undefined',
+             '-fno-sanitize=address']))
 
-    def test_argv_to_module_arg_lists(self) -> None:
+    def test_argv_to_module_arg_lists(self):
         """Tests that modules' arguments are properly identified."""
         self.assertTupleEqual(([], []), argv_to_module_arg_lists([]))
-        self.assertTupleEqual((["foo"], []), argv_to_module_arg_lists(["foo"]))
+        self.assertTupleEqual((['foo'], []), argv_to_module_arg_lists(['foo']))
 
         self.assertTupleEqual(
-            ([], [["foo", "bar"], ["baz"]]),
-            argv_to_module_arg_lists(["--module", "foo", "bar", "--module", "baz"]),
-        )
+            ([], [['foo', 'bar'], ['baz']]),
+            argv_to_module_arg_lists(
+                ['--module', 'foo', 'bar', '--module', 'baz']))
 
         self.assertTupleEqual(
-            (["foo", "bar"], [["baz"]]),
-            argv_to_module_arg_lists(["foo", "bar", "--module", "baz"]),
-        )
+            (['foo', 'bar'], [['baz']]),
+            argv_to_module_arg_lists(['foo', 'bar', '--module', 'baz']))
 
-    def test_main(self) -> None:
+    def test_main(self):
         """Test that the program itself works."""
         sio = StringIO()
         ldflags_main(
-            [
-                "ldflags_to_sanitizers.py",
-                "-fsanitize=undefined",
-                "--module",
-                "-fsanitize=address,thread",
-                "-fno-sanitize=thread",
-                "--module",
-                "-fsanitize=undefined",
-            ],
-            sio,
-        )
-        self.assertEqual("address undefined", sio.getvalue().strip())
+            ['ldflags_to_sanitizers.py', '-fsanitize=undefined', '--module',
+             '-fsanitize=address,thread', '-fno-sanitize=thread',
+             '--module', '-fsanitize=undefined'], sio)
+        self.assertEqual('address undefined', sio.getvalue().strip())
diff --git a/build/tools/build-renderscript.py b/build/tools/build-renderscript.py
new file mode 100755
index 0000000..3b90a79
--- /dev/null
+++ b/build/tools/build-renderscript.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Packages the platform's RenderScript for the NDK."""
+import os
+import site
+import sys
+
+site.addsitedir(os.path.join(os.path.dirname(__file__), '../lib'))
+site.addsitedir(os.path.join(os.path.dirname(__file__), '../..'))
+
+# pylint: disable=import-error,wrong-import-position
+import build_support
+from ndk.hosts import Host, host_to_tag
+# pylint: enable=import-error,wrong-import-position
+
+
+def get_rs_prebuilt_path(host_tag: str) -> str:
+    rel_prebuilt_path = f'prebuilts/renderscript/host/{host_tag}'
+    prebuilt_path = os.path.join(build_support.android_path(),
+                                 rel_prebuilt_path)
+    if not os.path.isdir(prebuilt_path):
+        sys.exit(f'Could not find prebuilt RenderScript at {prebuilt_path}')
+    return prebuilt_path
+
+
+def main(args) -> None:
+    RS_VERSION = 'current'
+
+    host: Host = args.host
+    package_dir = args.dist_dir
+
+    os_name = args.host.value
+    if os_name == 'windows64':
+        os_name = 'windows'
+
+    prebuilt_path = get_rs_prebuilt_path(f'{os_name}-x86')
+    print(f'prebuilt path: {prebuilt_path}')
+
+    package_name = f'renderscript-toolchain-{host_to_tag(host)}'
+    built_path = os.path.join(prebuilt_path, RS_VERSION)
+    build_support.make_package(package_name, built_path, package_dir)
+
+
+if __name__ == '__main__':
+    build_support.run(main)
diff --git a/build/tools/build-shader-tools.py b/build/tools/build-shader-tools.py
new file mode 100755
index 0000000..716ed00
--- /dev/null
+++ b/build/tools/build-shader-tools.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Builds the glslc, spirv-as, spirv-dis, and spirv-val host executables."""
+
+from __future__ import print_function
+
+import os
+import site
+import shutil
+import subprocess
+
+site.addsitedir(os.path.join(os.path.dirname(__file__), '../lib'))
+site.addsitedir(os.path.join(os.path.dirname(__file__), '../..'))
+
+# pylint: disable=import-error,wrong-import-position
+import build_support
+from build_support import ArgParser
+import ndk.hosts
+# pylint: enable=import-error,wrong-import-position
+
+
+def main(args):
+    host_tag = build_support.host_to_tag(args.host)
+    build_host_tag = build_support.get_default_host().value + "-x86"
+
+    package_dir = args.dist_dir
+
+    # TODO(danalbert): use ndk/sources/third_party/googletest/googletest
+    # after it has been updated to a version with CMakeLists
+    gtest_dir = build_support.android_path('external', 'googletest')
+    gtest_cmd = f'-DSHADERC_GOOGLE_TEST_DIR={gtest_dir}'
+
+    # SPIRV-Tools tests require effcee and re2.
+    # Don't enable RE2 testing because it's long and not useful to us.
+    effcee_dir = build_support.android_path('external', 'effcee')
+    re2_dir = build_support.android_path('external', 'regex-re2')
+    effcee_args = [('-DSHADERC_EFFCEE_DIR=' + effcee_dir),
+                   ('-DSHADERC_RE2_DIR=' + re2_dir),
+                   ('-DEFFCEE_GOOGLETEST_DIR=' + gtest_dir),
+                   ('-DEFFCEE_RE2_DIR=' + re2_dir),
+                   ('-DRE2_BUILD_TESTING=OFF')]
+
+    obj_out = os.path.join(args.out_dir, 'shader_tools/obj')
+    install_dir = os.path.join(args.out_dir, 'shader_tools/install')
+
+    package_src = '-'.join([os.path.join(args.out_dir,
+                                         'shader_tools/shader-tools'),
+                            host_tag])
+    package_name = '-'.join(['shader-tools', host_tag])
+
+    source_root = build_support.android_path('external', 'shaderc')
+    shaderc_shaderc_dir = os.path.join(source_root, 'shaderc')
+    spirv_headers_dir = os.path.join(source_root, 'spirv-headers')
+
+    cmake = build_support.android_path('prebuilts', 'cmake',
+                                       build_host_tag, 'bin', 'cmake')
+    ctest = build_support.android_path('prebuilts', 'cmake',
+                                       build_host_tag, 'bin', 'ctest')
+    ninja = build_support.android_path('prebuilts', 'ninja',
+                                       build_host_tag, 'ninja')
+    file_extension = ''
+
+    additional_args = list(effcee_args)
+    if args.host.is_windows:
+        gtest_cmd = ''
+        mingw_root = os.path.join(build_support.android_path(),
+                                  'prebuilts', 'gcc', build_host_tag, 'host',
+                                  'x86_64-w64-mingw32-4.8')
+        mingw_compilers = os.path.join(mingw_root, 'bin', 'x86_64-w64-mingw32')
+        mingw_toolchain = os.path.join(source_root, 'shaderc',
+                                       'cmake', 'linux-mingw-toolchain.cmake')
+        gtest_root = build_support.android_path('external', 'googletest')
+        additional_args.extend(['-DCMAKE_TOOLCHAIN_FILE=' + mingw_toolchain,
+                                '-DMINGW_SYSROOT=' + mingw_root,
+                                '-DMINGW_COMPILER_PREFIX=' + mingw_compilers,
+                                '-DSHADERC_GOOGLE_TEST_DIR=' + gtest_root])
+        file_extension = '.exe'
+        if args.host == ndk.hosts.Host.Windows64:
+            additional_args.extend(
+                ['-DCMAKE_CXX_FLAGS=-fno-rtti -fno-exceptions'])
+        else:
+            additional_args.extend(
+                ['-DCMAKE_CXX_FLAGS=-m32 -fno-rtti -fno-exceptions',
+                 '-DCMAKE_C_FLAGS=-m32'])
+
+    for d in [package_src, obj_out, install_dir]:
+        try:
+            os.makedirs(d)
+        except:
+            pass
+
+    cmake_command = [cmake, '-GNinja', '-DCMAKE_MAKE_PROGRAM=' + ninja,
+                     '-DCMAKE_BUILD_TYPE=Release',
+                     '-DCMAKE_INSTALL_PREFIX=' + install_dir,
+                     '-DSHADERC_THIRD_PARTY_ROOT_DIR=' + source_root,
+                     '-DSPIRV-Headers_SOURCE_DIR=' + spirv_headers_dir,
+                     gtest_cmd,
+                     shaderc_shaderc_dir]
+
+    cmake_command.extend(additional_args)
+
+    subprocess.check_call(cmake_command, cwd=obj_out)
+    subprocess.check_call([cmake, '--build', obj_out, '--', '-v'])
+    subprocess.check_call([cmake, '--build', obj_out,
+                           '--target', 'install/strip'])
+
+    files_to_copy = ['glslc' + file_extension,
+                     'spirv-as' + file_extension,
+                     'spirv-dis' + file_extension,
+                     'spirv-val' + file_extension,
+                     'spirv-cfg' + file_extension,
+                     'spirv-opt' + file_extension,
+                     'spirv-link' + file_extension,
+                     'spirv-reduce' + file_extension]
+    scripts_to_copy = ['spirv-lesspipe.sh',]
+    files_to_copy.extend(scripts_to_copy)
+
+    # Test, except on windows.
+    if not args.host.is_windows:
+        subprocess.check_call([ctest, '--verbose'], cwd=obj_out)
+
+    # Copy to install tree.
+    for src in files_to_copy:
+        shutil.copy2(os.path.join(install_dir, 'bin', src),
+                     os.path.join(package_src, src))
+    if args.host.is_windows:
+        for src in scripts_to_copy:
+            # Convert line endings on scripts.
+            # Do it in place to preserve executable permissions.
+            subprocess.check_call(['unix2dos', '-o',
+                                   os.path.join(package_src, src)])
+
+    build_support.make_package(package_name, package_src, package_dir)
+
+if __name__ == '__main__':
+    build_support.run(main, ArgParser)
diff --git a/build/tools/builder-funcs.sh b/build/tools/builder-funcs.sh
new file mode 100644
index 0000000..32e48c0
--- /dev/null
+++ b/build/tools/builder-funcs.sh
@@ -0,0 +1,655 @@
+#
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#  This file contains various shell function definitions that can be
+#  used to either build static and shared libraries from sources, or
+#  generate a Makefile to do it in parallel.
+#
+
+_BUILD_TAB=$(echo " " | tr ' ' '\t')
+
+builder_command ()
+{
+    if [ -z "$_BUILD_MK" ]; then
+        echo "$@"
+        "$@"
+    else
+        echo "${_BUILD_TAB}$@" >> $_BUILD_MK
+    fi
+}
+
+
+builder_log ()
+{
+    if [ "$_BUILD_MK" ]; then
+        echo "${_BUILD_TAB}echo $@" >> $_BUILD_MK
+    else
+        log "$@"
+    fi
+}
+
+# $1: Build directory
+# $2: Optional Makefile name
+builder_begin ()
+{
+    _BUILD_DIR_NEW=
+    _BUILD_DIR=$1
+    if [ ! -d "$_BUILD_DIR" ]; then
+        mkdir -p "$_BUILD_DIR"
+        fail_panic "Can't create build directory: $_BUILD_DIR"
+        _BUILD_DIR_NEW=true
+    else
+        rm -rf "$_BUILD_DIR/*"
+        fail_panic "Can't cleanup build directory: $_BUILD_DIR"
+    fi
+    _BUILD_TARGETS=
+    _BUILD_PREFIX=
+    _BUILD_MK=$2
+    if [ -n "$_BUILD_MK" ]; then
+        log "Creating temporary build Makefile: $_BUILD_MK"
+        rm -f $_BUILD_MK &&
+        echo "# Auto-generated by $0 - do not edit!" > $_BUILD_MK
+        echo ".PHONY: all" >> $_BUILD_MK
+        echo "all:" >> $_BUILD_MK
+    fi
+
+    builder_begin_module
+}
+
+# $1: Variable name
+# out: Variable value
+_builder_varval ()
+{
+    eval echo "\$$1"
+}
+
+_builder_varadd ()
+{
+    local _varname="$1"
+    local _varval="$(_builder_varval $_varname)"
+    shift
+    if [ -z "$_varval" ]; then
+        eval $_varname=\"$@\"
+    else
+        eval $_varname=\$$_varname\" $@\"
+    fi
+}
+
+
+builder_set_prefix ()
+{
+    _BUILD_PREFIX="$@"
+}
+
+builder_begin_module ()
+{
+    _BUILD_CC=
+    _BUILD_CXX=
+    _BUILD_AR=
+    _BUILD_C_INCLUDES=
+    _BUILD_CFLAGS=
+    _BUILD_CXXFLAGS=
+    _BUILD_LDFLAGS_BEGIN_SO=
+    _BUILD_LDFLAGS_END_SO=
+    _BUILD_LDFLAGS_BEGIN_EXE=
+    _BUILD_LDFLAGS_END_EXE=
+    _BUILD_LDFLAGS=
+    _BUILD_BINPREFIX=
+    _BUILD_DSTDIR=
+    _BUILD_SRCDIR=.
+    _BUILD_OBJECTS=
+    _BUILD_STATIC_LIBRARIES=
+    _BUILD_SHARED_LIBRARIES=
+    _BUILD_COMPILER_RUNTIME_LDFLAGS=-lgcc
+}
+
+builder_set_binprefix ()
+{
+    _BUILD_BINPREFIX=$1
+    _BUILD_CC=${1}gcc
+    _BUILD_CXX=${1}g++
+    _BUILD_AR=${1}ar
+}
+
+builder_set_binprefix_llvm ()
+{
+    _BUILD_BINPREFIX=$1
+    _BUILD_CC=${1}/clang
+    _BUILD_CXX=${1}/clang++
+    _BUILD_AR=${2}ar
+}
+
+builder_set_builddir ()
+{
+    _BUILD_DIR=$1
+}
+
+builder_set_srcdir ()
+{
+    _BUILD_SRCDIR=$1
+}
+
+builder_set_dstdir ()
+{
+    _BUILD_DSTDIR=$1
+}
+
+builder_ldflags ()
+{
+    _builder_varadd _BUILD_LDFLAGS "$@"
+}
+
+builder_ldflags_exe ()
+{
+    _builder_varadd _BUILD_LDFLAGS_EXE "$@"
+}
+
+builder_cflags ()
+{
+    _builder_varadd _BUILD_CFLAGS "$@"
+}
+
+builder_cxxflags ()
+{
+    _builder_varadd _BUILD_CXXFLAGS "$@"
+}
+
+builder_c_includes ()
+{
+    _builder_varadd _BUILD_C_INCLUDES "$@"
+}
+
+# $1: optional var to hold the original cflags before reset
+builder_reset_cflags ()
+{
+    local _varname="$1"
+    if [ -n "$_varname" ] ; then
+        eval $_varname=\"$_BUILD_CFLAGS\"
+    fi
+    _BUILD_CFLAGS=
+}
+
+# $1: optional var to hold the original cxxflags before reset
+builder_reset_cxxflags ()
+{
+    local _varname="$1"
+    if [ -n "$_varname" ] ; then
+        eval $_varname=\"$_BUILD_CXXFLAGS\"
+    fi
+    _BUILD_CXXFLAGS=
+}
+
+# $1: optional var to hold the original c_includes before reset
+builder_reset_c_includes ()
+{
+    local _varname="$1"
+    if [ -n "$_varname" ] ; then
+        eval $_varname=\"$_BUILD_C_INCLUDES\"
+    fi
+    _BUILD_C_INCLUDES=
+}
+
+builder_compiler_runtime_ldflags ()
+{
+    _BUILD_COMPILER_RUNTIME_LDFLAGS=$1
+}
+
+builder_link_with ()
+{
+    local LIB
+    for LIB; do
+        case $LIB in
+            *.a)
+                _builder_varadd _BUILD_STATIC_LIBRARIES $LIB
+                ;;
+            *.so)
+                _builder_varadd _BUILD_SHARED_LIBRARIES $LIB
+                ;;
+            *)
+                echo "ERROR: Unknown link library extension: $LIB"
+                exit 1
+        esac
+    done
+}
+
+builder_sources ()
+{
+    local src srcfull obj cc cflags text
+    if [ -z "$_BUILD_DIR" ]; then
+        panic "Build directory not set!"
+    fi
+    if [ -z "$_BUILD_CC" ]; then
+        _BUILD_CC=${CC:-gcc}
+    fi
+    if [ -z "$_BUILD_CXX" ]; then
+        _BUILD_CXX=${CXX:-g++}
+    fi
+    for src in "$@"; do
+        srcfull=$_BUILD_SRCDIR/$src
+        if [ ! -f "$srcfull" ]; then
+            echo "ERROR: Missing source file: $srcfull"
+            exit 1
+        fi
+        obj=$src
+        cflags=""
+        for inc in $_BUILD_C_INCLUDES; do
+            cflags=$cflags" -I$inc"
+        done
+        cflags=$cflags" -I$_BUILD_SRCDIR"
+        case $obj in
+            *.c)
+                obj=${obj%%.c}
+                text="C"
+                cc=$_BUILD_CC
+                cflags="$cflags $_BUILD_CFLAGS"
+                ;;
+            *.cpp)
+                obj=${obj%%.cpp}
+                text="C++"
+                cc=$_BUILD_CXX
+                cflags="$cflags $_BUILD_CXXFLAGS"
+                ;;
+            *.cc)
+                obj=${obj%%.cc}
+                text="C++"
+                cc=$_BUILD_CXX
+                cflags="$cflags $_BUILD_CXXFLAGS"
+                ;;
+            *.S|*.s)
+                obj=${obj%%.$obj}
+                text="ASM"
+                cc=$_BUILD_CC
+                cflags="$cflags $_BUILD_CFLAGS"
+                ;;
+            *)
+                echo "Unknown source file extension: $obj"
+                exit 1
+                ;;
+        esac
+
+        # Source file path can include ../ path items, ensure
+        # that the generated objects do not back up the output
+        # directory by translating them to __/
+        obj=$(echo "$obj" | tr '../' '__/')
+
+        # Ensure we have unwind tables in the generated machine code
+        # This is useful to get good stack traces
+        cflags=$cflags" -funwind-tables"
+
+        obj=$_BUILD_DIR/$obj.o
+        if [ "$_BUILD_MK" ]; then
+            echo "$obj: $srcfull" >> $_BUILD_MK
+        fi
+        builder_log "${_BUILD_PREFIX}$text: $src"
+        builder_command mkdir -p $(dirname "$obj")
+        builder_command $cc -c -o "$obj" "$srcfull" $cflags
+        fail_panic "Could not compile ${_BUILD_PREFIX}$src"
+        _BUILD_OBJECTS=$_BUILD_OBJECTS" $obj"
+    done
+}
+
+builder_static_library ()
+{
+    local lib libname arflags
+    libname=$1
+    if [ -z "$_BUILD_DSTDIR" ]; then
+        panic "Destination directory not set"
+    fi
+    lib=$_BUILD_DSTDIR/$libname
+    lib=${lib%%.a}.a
+    if [ "$_BUILD_MK" ]; then
+        _BUILD_TARGETS=$_BUILD_TARGETS" $lib"
+        echo "$lib: $_BUILD_OBJECTS" >> $_BUILD_MK
+    fi
+    if [ -z "${_BUILD_AR}" ]; then
+        _BUILD_AR=${AR:-ar}
+    fi
+    builder_log "${_BUILD_PREFIX}Archive: $libname"
+    rm -f "$lib"
+    arflags="crs"
+    case $HOST_TAG in
+        darwin*)
+            # XCode 'ar' doesn't support D flag
+            ;;
+        *)
+            arflags="${arflags}D"
+            ;;
+    esac
+    builder_command ${_BUILD_AR} $arflags "$lib" "$_BUILD_OBJECTS"
+    fail_panic "Could not archive ${_BUILD_PREFIX}$libname objects!"
+}
+
+builder_host_static_library ()
+{
+    local lib libname
+    libname=$1
+    if [ -z "$_BUILD_DSTDIR" ]; then
+        panic "Destination directory not set"
+    fi
+    lib=$_BUILD_DSTDIR/$libname
+    lib=${lib%%.a}.a
+    if [ "$_BUILD_MK" ]; then
+        _BUILD_TARGETS=$_BUILD_TARGETS" $lib"
+        echo "$lib: $_BUILD_OBJECTS" >> $_BUILD_MK
+    fi
+    if [ -z "$BUILD_AR" ]; then
+        _BUILD_AR=${AR:-ar}
+    fi
+    builder_log "${_BUILD_PREFIX}Archive: $libname"
+    rm -f "$lib"
+    builder_command ${_BUILD_AR} crsD "$lib" "$_BUILD_OBJECTS"
+    fail_panic "Could not archive ${_BUILD_PREFIX}$libname objects!"
+}
+
+builder_shared_library ()
+{
+    local lib libname suffix
+    libname=$1
+    suffix=$2
+
+    if [ -z "$suffix" ]; then
+        suffix=".so"
+    fi
+    lib=$_BUILD_DSTDIR/$libname
+    lib=${lib%%${suffix}}${suffix}
+    if [ "$_BUILD_MK" ]; then
+        _BUILD_TARGETS=$_BUILD_TARGETS" $lib"
+        echo "$lib: $_BUILD_OBJECTS" >> $_BUILD_MK
+    fi
+    builder_log "${_BUILD_PREFIX}SharedLibrary: $libname"
+
+    # Important: -lgcc must appear after objects and static libraries,
+    #            but before shared libraries for Android. It doesn't hurt
+    #            for other platforms.
+    #            Also -lm must come before -lc because bionic libc
+    #            accidentally exports a soft-float version of ldexp.
+    builder_command ${_BUILD_CXX} \
+        -Wl,-soname,$(basename $lib) \
+        -Wl,-shared \
+        $_BUILD_LDFLAGS_BEGIN_SO \
+        $_BUILD_OBJECTS \
+        $_BUILD_STATIC_LIBRARIES \
+        $_BUILD_COMPILER_RUNTIME_LDFLAGS \
+        $_BUILD_SHARED_LIBRARIES \
+        -lm -lc \
+        $_BUILD_LDFLAGS \
+        $_BUILD_LDFLAGS_END_SO \
+        -o $lib
+    fail_panic "Could not create ${_BUILD_PREFIX}shared library $libname"
+}
+
+# Same as builder_shared_library, but do not link the default libs
+builder_nostdlib_shared_library ()
+{
+    local lib libname suffix
+    libname=$1
+    suffix=$2
+    if [ -z "$suffix" ]; then
+        suffix=".so"
+    fi
+    lib=$_BUILD_DSTDIR/$libname
+    lib=${lib%%${suffix}}${suffix}
+    if [ "$_BUILD_MK" ]; then
+        _BUILD_TARGETS=$_BUILD_TARGETS" $lib"
+        echo "$lib: $_BUILD_OBJECTS" >> $_BUILD_MK
+    fi
+    builder_log "${_BUILD_PREFIX}SharedLibrary: $libname"
+
+    builder_command ${_BUILD_CXX} \
+        -Wl,-soname,$(basename $lib) \
+        -Wl,-shared \
+        $_BUILD_LDFLAGS_BEGIN_SO \
+        $_BUILD_OBJECTS \
+        $_BUILD_STATIC_LIBRARIES \
+        $_BUILD_SHARED_LIBRARIES \
+        $_BUILD_LDFLAGS \
+        $_BUILD_LDFLAGS_END_SO \
+        -o $lib
+    fail_panic "Could not create ${_BUILD_PREFIX}shared library $libname"
+}
+
+builder_host_shared_library ()
+{
+    local lib libname
+    libname=$1
+    lib=$_BUILD_DSTDIR/$libname
+    lib=${lib%%.so}.so
+    if [ "$_BUILD_MK" ]; then
+        _BUILD_TARGETS=$_BUILD_TARGETS" $lib"
+        echo "$lib: $_BUILD_OBJECTS" >> $_BUILD_MK
+    fi
+    builder_log "${_BUILD_PREFIX}SharedLibrary: $libname"
+
+    if [ -z "$_BUILD_CXX" ]; then
+        _BUILD_CXX=${CXX:-g++}
+    fi
+
+    # Important: -lgcc must appear after objects and static libraries,
+    #            but before shared libraries for Android. It doesn't hurt
+    #            for other platforms.
+    builder_command ${_BUILD_CXX} \
+        -shared -s \
+        $_BUILD_OBJECTS \
+        $_BUILD_STATIC_LIBRARIES \
+        $_BUILD_SHARED_LIBRARIES \
+        $_BUILD_LDFLAGS \
+        -o $lib
+    fail_panic "Could not create ${_BUILD_PREFIX}shared library $libname"
+}
+
+builder_host_executable ()
+{
+    local exe exename
+    exename=$1
+    exe=$_BUILD_DSTDIR/$exename$HOST_EXE
+    if [ "$_BUILD_MK" ]; then
+        _BUILD_TARGETS=$_BUILD_TARGETS" $exe"
+        echo "$exe: $_BUILD_OBJECTS" >> $_BUILD_MK
+    fi
+    builder_log "${_BUILD_PREFIX}Executable: $exename$HOST_EXE"
+
+    if [ -z "$_BUILD_CXX" ]; then
+        _BUILD_CXX=${CXX:-g++}
+    fi
+
+    # Important: -lgcc must appear after objects and static libraries,
+    #            but before shared libraries for Android. It doesn't hurt
+    #            for other platforms.
+    builder_command ${_BUILD_CXX} \
+        -s \
+        $_BUILD_OBJECTS \
+        $_BUILD_STATIC_LIBRARIES \
+        $_BUILD_SHARED_LIBRARIES \
+        $_BUILD_LDFLAGS \
+        -o $exe
+    fail_panic "Could not create ${_BUILD_PREFIX}executable $libname"
+}
+
+
+builder_end ()
+{
+    if [ "$_BUILD_MK" ]; then
+        echo "all: $_BUILD_TARGETS" >> $_BUILD_MK
+        run make -j$NUM_JOBS -f $_BUILD_MK
+        fail_panic "Could not build project!"
+    fi
+
+    if [ "$_BUILD_DIR_NEW" ]; then
+        log "Cleaning up build directory: $_BUILD_DIR"
+        rm -rf "$_BUILD_DIR"
+        _BUILD_DIR_NEW=
+    fi
+}
+
+# Same as builder_begin, but to target Android with a specific ABI
+# $1: ABI name (e.g. armeabi)
+# $2: Build directory
+# $3: Gcc version
+# $4: Optional llvm version
+# $5: Optional Makefile name
+# $6: Platform (android-X)
+builder_begin_android ()
+{
+    local ABI BUILDDIR LLVM_VERSION MAKEFILE
+    local ARCH SYSROOT LDIR FLAGS
+    local CRTBEGIN_SO_O CRTEND_SO_O CRTBEGIN_EXE_SO CRTEND_SO_O
+    local BINPREFIX GCC_TOOLCHAIN LLVM_TRIPLE GCC_VERSION
+    local SCRATCH_FLAGS PLATFORM
+    local PREBUILT_NDK=$ANDROID_BUILD_TOP/prebuilts/ndk/current
+    if [ -z "$ANDROID_BUILD_TOP" ]; then
+        panic "ANDROID_BUILD_TOP is not defined!"
+    elif [ ! -d "$PREBUILT_NDK/platforms" ]; then
+        panic "Missing directory: $PREBUILT_NDK/platforms"
+    fi
+    ABI=$1
+    BUILDDIR=$2
+    GCC_VERSION=$3
+    LLVM_VERSION=$4
+    MAKEFILE=$5
+    ARCH=$(convert_abi_to_arch $ABI)
+    PLATFORM=$6
+
+    if [ -n "$LLVM_VERSION" ]; then
+        # override GCC_VERSION to pick $DEFAULT_LLVM_GCC??_VERSION instead
+        if [ "$ABI" != "${ABI%%64*}" ]; then
+            GCC_VERSION=$DEFAULT_LLVM_GCC64_VERSION
+        else
+            GCC_VERSION=$DEFAULT_LLVM_GCC32_VERSION
+        fi
+    fi
+    for TAG in $HOST_TAG $HOST_TAG32; do
+        BINPREFIX=$ANDROID_BUILD_TOP/prebuilts/ndk/current/$(get_toolchain_binprefix_for_arch $ARCH $GCC_VERSION $TAG)
+        if [ -f ${BINPREFIX}gcc ]; then
+            break;
+        fi
+    done
+    if [ -n "$LLVM_VERSION" ]; then
+        GCC_TOOLCHAIN=`dirname $BINPREFIX`
+        GCC_TOOLCHAIN=`dirname $GCC_TOOLCHAIN`
+        LLVM_BINPREFIX=$(get_llvm_toolchain_binprefix $TAG)
+    fi
+
+    if [ -z "$PLATFORM" ]; then
+      SYSROOT=$PREBUILT_NDK/$(get_default_platform_sysroot_for_arch $ARCH)
+    else
+      SYSROOT=$PREBUILT_NDK/platforms/$PLATFORM/arch-$ARCH
+    fi
+    LDIR=$SYSROOT"/usr/"$(get_default_libdir_for_abi $ABI)
+
+    CRTBEGIN_EXE_O=$LDIR/crtbegin_dynamic.o
+    CRTEND_EXE_O=$LDIR/crtend_android.o
+
+    CRTBEGIN_SO_O=$LDIR/crtbegin_so.o
+    CRTEND_SO_O=$LDIR/crtend_so.o
+    if [ ! -f "$CRTBEGIN_SO_O" ]; then
+        CRTBEGIN_SO_O=$CRTBEGIN_EXE_O
+    fi
+    if [ ! -f "$CRTEND_SO_O" ]; then
+        CRTEND_SO_O=$CRTEND_EXE_O
+    fi
+
+    builder_begin "$BUILDDIR" "$MAKEFILE"
+    builder_set_prefix "$ABI "
+    if [ -z "$LLVM_VERSION" ]; then
+        builder_set_binprefix "$BINPREFIX"
+    else
+        builder_set_binprefix_llvm "$LLVM_BINPREFIX" "$BINPREFIX"
+        case $ABI in
+            armeabi)
+                LLVM_TRIPLE=armv5te-none-linux-androideabi
+                ;;
+            armeabi-v7a)
+                LLVM_TRIPLE=armv7-none-linux-androideabi
+                ;;
+            arm64-v8a)
+                LLVM_TRIPLE=aarch64-none-linux-android
+                ;;
+            x86)
+                LLVM_TRIPLE=i686-none-linux-android
+                ;;
+            x86_64)
+                LLVM_TRIPLE=x86_64-none-linux-android
+                ;;
+            mips|mips32r6)
+                LLVM_TRIPLE=mipsel-none-linux-android
+                ;;
+            mips64)
+                LLVM_TRIPLE=mips64el-none-linux-android
+                ;;
+        esac
+        SCRATCH_FLAGS="-target $LLVM_TRIPLE $FLAGS"
+        builder_ldflags "$SCRATCH_FLAGS"
+        builder_cflags  "$SCRATCH_FLAGS"
+        builder_cxxflags "$SCRATCH_FLAGS"
+        if [ ! -z $GCC_TOOLCHAIN ]; then
+            SCRATCH_FLAGS="-gcc-toolchain $GCC_TOOLCHAIN"
+            builder_cflags "$SCRATCH_FLAGS"
+            builder_cxxflags "$SCRATCH_FLAGS"
+            builder_ldflags "$SCRATCH_FLAGS"
+            if [ "$ABI" = "mips" ]; then
+              # Help clang use mips64el multilib GCC
+              SCRATCH_FLAGS="-L${GCC_TOOLCHAIN}/lib/gcc/mips64el-linux-android/4.9.x/32/mips-r1 "
+              builder_ldflags "$SCRATCH_FLAGS"
+            fi
+        fi
+    fi
+
+    SCRATCH_FLAGS="--sysroot=$SYSROOT"
+    builder_cflags "$SCRATCH_FLAGS"
+    builder_cxxflags "$SCRATCH_FLAGS"
+
+    SCRATCH_FLAGS="--sysroot=$SYSROOT -nostdlib"
+    _BUILD_LDFLAGS_BEGIN_SO="$SCRATCH_FLAGS $CRTBEGIN_SO_O"
+    _BUILD_LDFLAGS_BEGIN_EXE="$SCRATCH_FLAGS $CRTBEGIN_EXE_O"
+
+    _BUILD_LDFLAGS_END_SO="$CRTEND_SO_O"
+    _BUILD_LDFLAGS_END_EXE="$CRTEND_EXE_O"
+
+    case $ABI in
+        armeabi)
+            if [ -z "$LLVM_VERSION" ]; then
+                # add -minline-thumb1-jumptable such that gabi++/stlport/libc++ can be linked
+                # with compiler-rt where helpers __gnu_thumb1_case_* (in libgcc.a) don't exist
+                SCRATCH_FLAGS="-minline-thumb1-jumptable"
+                builder_cflags "$SCRATCH_FLAGS"
+                builder_cxxflags "$SCRATCH_FLAGS"
+            else
+                builder_cflags ""
+                builder_cxxflags ""
+            fi
+            ;;
+        armeabi-v7a)
+            SCRATCH_FLAGS="-march=armv7-a -mfpu=vfpv3-d16 -mfloat-abi=softfp"
+            builder_cflags "$SCRATCH_FLAGS"
+            builder_cxxflags "$SCRATCH_FLAGS"
+            builder_ldflags "-march=armv7-a"
+            ;;
+        mips)
+            SCRATCH_FLAGS="-mips32"
+            builder_cflags "$SCRATCH_FLAGS"
+            builder_cxxflags "$SCRATCH_FLAGS"
+            builder_ldflags "-mips32"
+            ;;
+    esac
+}
+
+# $1: Build directory
+# $2: Optional Makefile name
+builder_begin_host ()
+{
+    prepare_host_build
+    builder_begin "$1" "$2"
+    builder_set_prefix "$HOST_TAG "
+}
diff --git a/build/tools/common-build-host-funcs.sh b/build/tools/common-build-host-funcs.sh
new file mode 100644
index 0000000..5088ab5
--- /dev/null
+++ b/build/tools/common-build-host-funcs.sh
@@ -0,0 +1,791 @@
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# A set of function shared by the 'build-host-xxxx.sh' scripts.
+# They are mostly related to building host libraries.
+#
+# NOTE: This script uses various prefixes:
+#
+#    BH_       Used for public macros
+#    bh_       Use for public functions
+#
+#    _BH_      Used for private macros
+#    _bh_      Used for private functions
+#
+# Callers should only rely on the public macros and functions defined here.
+#
+
+# List of macros defined by the functions here:
+#
+#   defined by 'bh_set_build_tag'
+#
+#   BH_BUILD_CONFIG     Generic GNU config triplet for build system
+#   BH_BUILD_OS         NDK system name
+#   BH_BUILD_ARCH       NDK arch name
+#   BH_BUILD_TAG        NDK system tag ($OS-$ARCH)
+#   BH_BUILD_BITS       build system bitness (32 or 64)
+#
+#   defined by 'bh_set_host_tag'
+#
+#   BH_HOST_CONFIG
+#   BH_HOST_OS
+#   BH_HOST_ARCH
+#   BH_HOST_TAG
+#   BH_HOST_BITS
+#
+#   defined by 'bh_set_target_tag'
+#
+#   BH_TARGET_CONFIG
+#   BH_TARGET_OS
+#   BH_TARGET_ARCH
+#   BH_TARGET_TAG
+#   BH_TARGET_BITS
+#
+#
+
+
+# The values of HOST_OS/ARCH/TAG will be redefined during the build to
+# match those of the system the generated compiler binaries will run on.
+#
+# Save the original ones into BUILD_XXX variants, corresponding to the
+# machine where the build happens.
+#
+BH_BUILD_OS=$HOST_OS
+BH_BUILD_ARCH=$HOST_ARCH
+BH_BUILD_TAG=$HOST_TAG
+
+# Map an NDK system tag to an OS name
+# $1: system tag (e.g. linux-x86)
+# Out: system name (e.g. linux)
+bh_tag_to_os ()
+{
+    local RET
+    case $1 in
+        android-*) RET="android";;
+        linux-*) RET="linux";;
+        darwin-*) RET="darwin";;
+        windows|windows-*) RET="windows";;
+        *) echo "ERROR: Unknown tag $1" >&2; echo "INVALID"; exit 1;;
+    esac
+    echo $RET
+}
+
+# Map an NDK system tag to an architecture name
+# $1: system tag (e.g. linux-x86)
+# Out: arch name (e.g. x86)
+bh_tag_to_arch ()
+{
+    local RET
+    case $1 in
+        *-arm) RET=arm;;
+        *-arm64) RET=arm64;;
+        *-mips) RET=mips;;
+        *-mips64) RET=mips64;;
+        windows|*-x86) RET=x86;;
+        *-x86_64) RET=x86_64;;
+        *) echo "ERROR: Unknown tag $1" >&2; echo "INVALID"; exit 1;;
+    esac
+    echo $RET
+}
+
+# Map an NDK system tag to a bit number
+# $1: system tag (e.g. linux-x86)
+# Out: bit number (32 or 64)
+bh_tag_to_bits ()
+{
+    local RET
+    case $1 in
+        windows|*-x86|*-arm|*-mips) RET=32;;
+        *-x86_64|*-arm64|*-mips64) RET=64;;
+        *) echo "ERROR: Unknown tag $1" >&2; echo "INVALID"; exit 1;;
+    esac
+    echo $RET
+}
+
+# Map an NDK system tag to the corresponding GNU configuration triplet.
+# $1: NDK system tag
+# Out: GNU configuration triplet
+bh_tag_to_config_triplet ()
+{
+    local RET
+    case $1 in
+        linux-x86) RET=i686-linux-gnu;;
+        linux-x86_64) RET=x86_64-linux-gnu;;
+        darwin-x86) RET=i686-apple-darwin;;
+        darwin-x86_64) RET=x86_64-apple-darwin;;
+        windows|windows-x86) RET=i686-w64-mingw32;;
+        windows-x86_64) RET=x86_64-w64-mingw32;;
+        android-arm) RET=arm-linux-androideabi;;
+        android-arm64) RET=aarch64-linux-android;;
+        android-x86) RET=i686-linux-android;;
+        android-x86_64) RET=x86_64-linux-android;;
+        android-mips) RET=mipsel-linux-android;;
+        android-mips64) RET=mips64el-linux-android;;
+        *) echo "ERROR: Unknown tag $1" >&2; echo "INVALID"; exit 1;;
+    esac
+    echo "$RET"
+}
+
+
+bh_set_build_tag ()
+{
+  SAVED_OPTIONS=$(set +o)
+  set -e
+  BH_BUILD_OS=$(bh_tag_to_os $1)
+  BH_BUILD_ARCH=$(bh_tag_to_arch $1)
+  BH_BUILD_BITS=$(bh_tag_to_bits $1)
+  BH_BUILD_TAG=$BH_BUILD_OS-$BH_BUILD_ARCH
+  BH_BUILD_CONFIG=$(bh_tag_to_config_triplet $1)
+  eval "$SAVED_OPTIONS"
+}
+
+# Set default BH_BUILD macros.
+bh_set_build_tag $HOST_TAG
+
+bh_set_host_tag ()
+{
+  SAVED_OPTIONS=$(set +o)
+  set -e
+  BH_HOST_OS=$(bh_tag_to_os $1)
+  BH_HOST_ARCH=$(bh_tag_to_arch $1)
+  BH_HOST_BITS=$(bh_tag_to_bits $1)
+  BH_HOST_TAG=$BH_HOST_OS-$BH_HOST_ARCH
+  BH_HOST_CONFIG=$(bh_tag_to_config_triplet $1)
+  eval "$SAVED_OPTIONS"
+}
+
+bh_set_target_tag ()
+{
+  SAVED_OPTIONS=$(set +o)
+  set -e
+  BH_TARGET_OS=$(bh_tag_to_os $1)
+  BH_TARGET_ARCH=$(bh_tag_to_arch $1)
+  BH_TARGET_BITS=$(bh_tag_to_bits $1)
+  BH_TARGET_TAG=$BH_TARGET_OS-$BH_TARGET_ARCH
+  BH_TARGET_CONFIG=$(bh_tag_to_config_triplet $1)
+  eval "$SAVED_OPTIONS"
+}
+
+bh_sort_systems_build_first ()
+{
+  local IN_SYSTEMS="$1"
+  local OUT_SYSTEMS
+  # Pull out the host if there
+  for IN_SYSTEM in $IN_SYSTEMS; do
+    if [ "$IN_SYSTEM" = "$BH_BUILD_TAG" ]; then
+        OUT_SYSTEMS=$IN_SYSTEM
+    fi
+  done
+  # Append the rest
+  for IN_SYSTEM in $IN_SYSTEMS; do
+    if [ ! "$IN_SYSTEM" = "$BH_BUILD_TAG" ]; then
+        OUT_SYSTEMS=$OUT_SYSTEMS" $IN_SYSTEM"
+    fi
+  done
+  echo $OUT_SYSTEMS
+}
+
+# $1 is the string to search for
+# $2... is the list to search in
+# Returns first, yes or no.
+bh_list_contains ()
+{
+  local SEARCH="$1"
+  shift
+  # For dash, this has to be split over 2 lines.
+  # Seems to be a bug with dash itself:
+  # https://bugs.launchpad.net/ubuntu/+source/dash/+bug/141481
+  local LIST
+  LIST=$@
+  local RESULT=first
+  # Pull out the host if there
+  for ELEMENT in $LIST; do
+    if [ "$ELEMENT" = "$SEARCH" ]; then
+      echo $RESULT
+      return 0
+    fi
+    RESULT=yes
+  done
+  echo no
+  return 1
+}
+
+
+# Use this function to enable/disable colored output
+# $1: 'true' or 'false'
+bh_set_color_mode ()
+{
+  local DO_COLOR=
+  case $1 in
+    on|enable|true) DO_COLOR=true
+    ;;
+  esac
+  if [ "$DO_COLOR" ]; then
+    _BH_COLOR_GREEN="\033[32m"
+    _BH_COLOR_PURPLE="\033[35m"
+    _BH_COLOR_CYAN="\033[36m"
+    _BH_COLOR_END="\033[0m"
+  else
+    _BH_COLOR_GREEN=
+    _BH_COLOR_PURPLE=
+    _BH_COLOR_CYAN=
+    _BH_COLOR_END=
+  fi
+}
+
+# By default, enable color mode
+bh_set_color_mode true
+
+# Pretty printing with colors!
+bh_host_text ()
+{
+    printf "[${_BH_COLOR_GREEN}${BH_HOST_TAG}${_BH_COLOR_END}]"
+}
+
+bh_toolchain_text ()
+{
+    printf "[${_BH_COLOR_PURPLE}${BH_TOOLCHAIN}${_BH_COLOR_END}]"
+}
+
+bh_target_text ()
+{
+    printf "[${_BH_COLOR_CYAN}${BH_TARGET_TAG}${_BH_COLOR_END}]"
+}
+
+bh_arch_text ()
+{
+    # Print arch name in cyan
+    printf "[${_BH_COLOR_CYAN}${BH_TARGET_ARCH}${_BH_COLOR_END}]"
+}
+
+# Check that a given compiler generates code correctly
+#
+# This is to detect bad/broken toolchains, e.g. amd64-mingw32msvc
+# is totally broken on Ubuntu 10.10 and 11.04
+#
+# $1: compiler
+# $2: optional extra flags
+#
+bh_check_compiler ()
+{
+    local CC="$1"
+    local TMPC=$TMPDIR/build-host-$USER-$$.c
+    local TMPE=${TMPC%%.c}
+    local TMPL=$TMPC.log
+    local RET
+    shift
+    cat > $TMPC <<EOF
+int main(void) { return 0; }
+EOF
+    log_n "Checking compiler code generation ($CC)... "
+    $CC -o $TMPE $TMPC "$@" >$TMPL 2>&1
+    RET=$?
+    rm -f $TMPC $TMPE $TMPL
+    if [ "$RET" = 0 ]; then
+        log "yes"
+    else
+        log "no"
+    fi
+    return $RET
+}
+
+
+# $1: toolchain install dir
+# $2: toolchain prefix, no trailing dash (e.g. arm-linux-androideabi)
+# $3: optional -m32 or -m64.
+_bh_try_host_fullprefix ()
+{
+    local PREFIX="$1/bin/$2"
+    shift; shift;
+    if [ -z "$HOST_FULLPREFIX" ]; then
+        local GCC="$PREFIX-gcc"
+        if [ -f "$GCC" ]; then
+            if bh_check_compiler "$GCC" "$@"; then
+                HOST_FULLPREFIX="${GCC%%gcc}"
+                dump "$(bh_host_text) Using host gcc: $GCC $@"
+            else
+                dump "$(bh_host_text) Ignoring broken host gcc: $GCC $@"
+            fi
+        fi
+    fi
+}
+
+# $1: host prefix, no trailing slash (e.g. i686-linux-android)
+# $2: optional compiler args (should be empty, -m32 or -m64)
+_bh_try_host_prefix ()
+{
+    local PREFIX="$1"
+    shift
+    if [ -z "$HOST_FULLPREFIX" ]; then
+        local GCC="$(which $PREFIX-gcc 2>/dev/null)"
+        if [ "$GCC" -a -e "$GCC" ]; then
+            if bh_check_compiler "$GCC" "$@"; then
+                HOST_FULLPREFIX=${GCC%%gcc}
+                dump "$(bh_host_text) Using host gcc: ${HOST_FULLPREFIX}gcc $@"
+            else
+                dump "$(bh_host_text) Ignoring broken host gcc: $GCC $@"
+            fi
+        fi
+    fi
+}
+
+# Used to determine the minimum possible Darwin version that a Darwin SDK
+# can target. This actually depends on the host architecture.
+# $1: Host architecture name
+# out: SDK version number (e.g. 10.4 or 10.5)
+_bh_darwin_arch_to_min_version ()
+{
+  if [ "$1" = "x86" ]; then
+    echo "10.4"
+  else
+    echo "10.5"
+  fi
+}
+
+# Used to check for the availability of a compatibility SDK in Darwin;
+# this can be used to generate binaries compatible with either Tiger or
+# Leopard.
+#
+# $1: SDK root path
+# $2: Darwin compatibility minimum version
+_bh_check_darwin_sdk ()
+{
+    if [ -d "$1" -a -z "$HOST_CFLAGS" ] ; then
+        HOST_CFLAGS="-isysroot $1 -mmacosx-version-min=$2 -DMAXOSX_DEPLOYEMENT_TARGET=$2"
+        HOST_CXXFLAGS=$HOST_CFLAGS
+        HOST_LDFLAGS="-syslibroot $1 -mmacosx-version-min=$2"
+        dump "Generating $2-compatible binaries."
+        return 0  # success
+    fi
+    return 1
+}
+
+# Check that a given compiler generates 32 or 64 bit code.
+# $1: compiler full path (e.g. /path/to/fullprefix-gcc)
+# $2: 32 or 64
+# $3: extra compiler flags
+# Return: success iff the compiler generates $2-bits code
+_bh_check_compiler_bitness ()
+{
+    local CC="$1"
+    local BITS="$2"
+    local TMPC=$TMPDIR/build-host-gcc-bits-$USER-$$.c
+    local TMPL=$TMPC.log
+    local RET
+    shift; shift;
+    cat > $TMPC <<EOF
+/* this program will fail to compile if the compiler doesn't generate BITS-bits code */
+int tab[1-2*(sizeof(void*)*8 != BITS)];
+EOF
+    dump_n "$(bh_host_text) Checking that the compiler generates $BITS-bits code ($@)... "
+    $CC -c -DBITS=$BITS -o /dev/null $TMPC $HOST_CFLAGS "$@" > $TMPL 2>&1
+    RET=$?
+    rm -f $TMPC $TMPL
+    if [ "$RET" = 0 ]; then
+        dump "yes"
+    else
+        dump "no"
+    fi
+    return $RET
+}
+
+# This function probes the system to find the best toolchain or cross-toolchain
+# to build binaries that run on a given host system. After that, it generates
+# a wrapper toolchain under $2 with a prefix of ${BH_HOST_CONFIG}-
+# where $BH_HOST_CONFIG is a GNU configuration name.
+#
+# Important: this script might redefine $BH_HOST_CONFIG to a different value!
+# (This behavior previously happened with MinGW, but doesn't anymore.)
+#
+# $1: NDK system tag (e.g. linux-x86)
+#
+# The following can be defined, otherwise they'll be auto-detected and set.
+#
+#  DARWIN_MIN_VERSION   -> Darwin minimum compatibility version
+#  DARWIN_SDK_VERSION   -> Darwin SDK version
+#
+# The following can be defined for extra features:
+#
+#  DARWIN_TOOLCHAIN     -> Path to Darwin cross-toolchain (cross-compile only).
+#  DARWIN_SYSROOT       -> Path to Darwin SDK sysroot (cross-compile only).
+#  ANDROID_NDK_ROOT     -> Top-level NDK directory, for automatic probing
+#                          of prebuilt platform toolchains.
+#
+_bh_select_toolchain_for_host ()
+{
+    local HOST_CFLAGS HOST_CXXFLAGS HOST_LDFLAGS
+    local HOST_ASFLAGS HOST_WINDRES_FLAGS
+    local HOST_FULLPREFIX
+    local DARWIN_ARCH DARWIN_SDK_SUBDIR
+
+    # We do all the complex auto-detection magic in the setup phase,
+    # then save the result in host-specific global variables.
+    #
+    # In the build phase, we will simply restore the values into the
+    # global HOST_FULLPREFIX / HOST_BUILD_DIR
+    # variables.
+    #
+
+    # Try to find the best toolchain to do that job, assuming we are in
+    # a full Android platform source checkout, we can look at the prebuilts/
+    # directory.
+    case $1 in
+        linux-x86)
+            panic "Sorry, this script does not support building 32-bit Linux binaries."
+            ;;
+
+        linux-x86_64)
+            local LINUX_GLIBC_PREBUILT=x86_64-linux-glibc2.15-4.8
+            _bh_try_host_fullprefix "$(dirname $ANDROID_NDK_ROOT)/prebuilts/gcc/linux-x86/host/$LINUX_GLIBC_PREBUILT" x86_64-linux
+            if [ -z "$HOST_FULLPREFIX" ]; then
+                dump "Cannot find the x86_64 Linux-targeting compiler. Make sure the"
+                dump "$LINUX_GLIBC_PREBUILT prebuilt is checked out."
+                exit 1
+            fi
+            ;;
+
+        darwin-*)
+            DARWIN_ARCH=$(bh_tag_to_arch $1)
+            if [ -z "$DARWIN_MIN_VERSION" ]; then
+                DARWIN_MIN_VERSION=$(_bh_darwin_arch_to_min_version $DARWIN_ARCH)
+            fi
+            case $BH_BUILD_OS in
+                darwin)
+                    if [ "$DARWIN_SDK_VERSION" ]; then
+                        # Compute SDK subdirectory name.
+                        # FIX: use DARWIN_SDK_VERSION (the variable tested
+                        # above and set by --darwin-sdk-version), not the
+                        # never-assigned DARWIN_SDK.
+                        case $DARWIN_SDK_VERSION in
+                            10.4) DARWIN_SDK_SUBDIR=$DARWIN_SDK_VERSION.sdku;;
+                            *) DARWIN_SDK_SUBDIR=$DARWIN_SDK_VERSION.sdk;;
+                        esac
+                        # Since Xcode moved to the App Store the SDKs have been 'sandboxed' into the Xcode.app folder.
+                        _bh_check_darwin_sdk /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX$DARWIN_SDK_SUBDIR $DARWIN_MIN_VERSION
+                        _bh_check_darwin_sdk /Developer/SDKs/MacOSX$DARWIN_SDK_SUBDIR $DARWIN_MIN_VERSION
+                    else
+                        # Since Xcode moved to the App Store the SDKs have been 'sandboxed' into the Xcode.app folder.
+                        _bh_check_darwin_sdk /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.7.sdk $DARWIN_MIN_VERSION
+                        _bh_check_darwin_sdk /Developer/SDKs/MacOSX10.7.sdk  $DARWIN_MIN_VERSION
+                        _bh_check_darwin_sdk /Developer/SDKs/MacOSX10.6.sdk  $DARWIN_MIN_VERSION
+                        # NOTE: The 10.5.sdk on Lion is buggy and cannot build basic C++ programs
+                        #_bh_check_darwin_sdk /Developer/SDKs/MacOSX10.5.sdk  $DARWIN_ARCH
+                        # NOTE: The 10.4.sdku is not available anymore and could not be tested.
+                        #_bh_check_darwin_sdk /Developer/SDKs/MacOSX10.4.sdku $DARWIN_ARCH
+                    fi
+                    if [ -z "$HOST_CFLAGS" ]; then
+                        local version="$(sw_vers -productVersion)"
+                        log "Generating $version-compatible binaries!"
+                    fi
+                    ;;
+                *)
+                    if [ -z "$DARWIN_TOOLCHAIN" -o -z "$DARWIN_SYSROOT" ]; then
+                        dump "If you want to build Darwin binaries on a non-Darwin machine,"
+                        dump "Please define DARWIN_TOOLCHAIN to name it, and DARWIN_SYSROOT to point"
+                        dump "to the SDK. For example:"
+                        dump ""
+                        dump "   DARWIN_TOOLCHAIN=\"i686-apple-darwin11\""
+                        dump "   DARWIN_SYSROOT=\"~/darwin-cross/MacOSX10.7.sdk\""
+                        dump "   export DARWIN_TOOLCHAIN DARWIN_SYSROOT"
+                        dump ""
+                        exit 1
+                    fi
+                    _bh_check_darwin_sdk $DARWIN_SYSROOT $DARWIN_MIN_VERSION
+                    _bh_try_host_prefix "$DARWIN_TOOLCHAIN" -m$(bh_tag_to_bits $1) --sysroot "$DARWIN_SYSROOT"
+                    if [ -z "$HOST_FULLPREFIX" ]; then
+                        dump "It looks like $DARWIN_TOOLCHAIN-gcc is not in your path, or does not work correctly!"
+                        exit 1
+                    fi
+                    dump "Using darwin cross-toolchain: ${HOST_FULLPREFIX}gcc"
+                    ;;
+            esac
+            ;;
+
+        windows|windows-x86)
+            case $BH_BUILD_OS in
+                linux)
+                    # Prefer the prebuilt cross-compiler.
+                    _bh_try_host_fullprefix "$(dirname $ANDROID_NDK_ROOT)/prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8" x86_64-w64-mingw32 -m32
+                    # We favor these because they are more recent, and because
+                    # we have a script to rebuild them from scratch. See
+                    # build-mingw64-toolchain.sh. Typically provided by the
+                    # 'mingw-w64' package on Debian and Ubuntu systems.
+                    _bh_try_host_prefix i686-w64-mingw32
+                    _bh_try_host_prefix x86_64-w64-mingw32 -m32
+                    # Special note for Fedora: this distribution used
+                    # to have a mingw32-gcc package that provided a 32-bit
+                    # only cross-toolchain named i686-pc-mingw32.
+                    # Later versions of the distro now provide a new package
+                    # named mingw-gcc which provides i686-w64-mingw32 and
+                    # x86_64-w64-mingw32 instead.
+                    if [ -z "$HOST_FULLPREFIX" ]; then
+                        dump "There is no Windows cross-compiler. Ensure that you"
+                        dump "have one of these installed and in your path:"
+                        dump "   i686-w64-mingw32-gcc    (see build-mingw64-toolchain.sh)"
+                        dump "   x86_64-w64-mingw32-gcc  (see build-mingw64-toolchain.sh)"
+                        dump ""
+                        exit 1
+                    fi
+                    if [ "$BH_HOST_CONFIG" != i686-w64-mingw32 ]; then
+                        panic "Unexpected value of BH_HOST_CONFIG: $BH_HOST_CONFIG"
+                    fi
+                    # If the 32-bit wrappers call a 64-bit toolchain, add flags
+                    # to default ld/as/windres to 32 bits.
+                    case "$HOST_FULLPREFIX" in
+                        *x86_64-w64-mingw32-)
+                            HOST_LDFLAGS="-m i386pe"
+                            HOST_ASFLAGS="--32"
+                            HOST_WINDRES_FLAGS="-F pe-i386"
+                            ;;
+                        *)
+                            ;;
+                    esac
+                    ;;
+                *) panic "Sorry, this script only supports building windows binaries on Linux."
+                ;;
+            esac
+            HOST_CFLAGS=$HOST_CFLAGS" -D__USE_MINGW_ANSI_STDIO=1"
+            HOST_CXXFLAGS=$HOST_CXXFLAGS" -D__USE_MINGW_ANSI_STDIO=1"
+            ;;
+
+        windows-x86_64)
+            case $BH_BUILD_OS in
+                linux)
+                    # Prefer the prebuilt cross-compiler.
+                    # See comments above for windows-x86.
+                    _bh_try_host_fullprefix "$(dirname $ANDROID_NDK_ROOT)/prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8" x86_64-w64-mingw32
+                    _bh_try_host_prefix x86_64-w64-mingw32
+                    if [ -z "$HOST_FULLPREFIX" ]; then
+                        dump "There is no Windows cross-compiler in your path. Ensure you"
+                        dump "have one of these installed and in your path:"
+                        dump "   x86_64-w64-mingw32-gcc  (see build-mingw64-toolchain.sh)"
+                        dump ""
+                        exit 1
+                    fi
+                    if [ "$BH_HOST_CONFIG" != x86_64-w64-mingw32 ]; then
+                        panic "Unexpected value of BH_HOST_CONFIG: $BH_HOST_CONFIG"
+                    fi
+                    ;;
+
+                *) panic "Sorry, this script only supports building windows binaries on Linux."
+                ;;
+            esac
+            HOST_CFLAGS=$HOST_CFLAGS" -D__USE_MINGW_ANSI_STDIO=1"
+            HOST_CXXFLAGS=$HOST_CXXFLAGS" -D__USE_MINGW_ANSI_STDIO=1"
+            ;;
+    esac
+
+    # Determine the default bitness of our compiler. If it doesn't match
+    # HOST_BITS, try to see if it supports -m32 or -m64 to change it.
+    if ! _bh_check_compiler_bitness ${HOST_FULLPREFIX}gcc $BH_HOST_BITS; then
+        local TRY_CFLAGS
+        case $BH_HOST_BITS in
+            32) TRY_CFLAGS=-m32;;
+            64) TRY_CFLAGS=-m64;;
+        esac
+        if ! _bh_check_compiler_bitness ${HOST_FULLPREFIX}gcc $BH_HOST_BITS $TRY_CFLAGS; then
+            panic "Can't find a way to generate $BH_HOST_BITS binaries with this compiler: ${HOST_FULLPREFIX}gcc"
+        fi
+        HOST_CFLAGS=$HOST_CFLAGS" "$TRY_CFLAGS
+        HOST_CXXFLAGS=$HOST_CXXFLAGS" "$TRY_CFLAGS
+    fi
+
+    # We're going to generate a wrapper toolchain with the $HOST prefix
+    # i.e. if $HOST is 'i686-linux-gnu', then we're going to generate a
+    # wrapper toolchain named 'i686-linux-gnu-gcc' that will redirect
+    # to whatever HOST_FULLPREFIX points to, with appropriate modifier
+    # compiler/linker flags.
+    #
+    # This helps tremendously getting stuff to compile with the GCC
+    # configure scripts.
+    #
+    run mkdir -p "$BH_WRAPPERS_DIR" &&
+    run $NDK_BUILDTOOLS_PATH/gen-toolchain-wrapper.sh "$BH_WRAPPERS_DIR" \
+        --src-prefix="$BH_HOST_CONFIG-" \
+        --dst-prefix="$HOST_FULLPREFIX" \
+        --cflags="$HOST_CFLAGS" \
+        --cxxflags="$HOST_CXXFLAGS" \
+        --ldflags="$HOST_LDFLAGS" \
+        --asflags="$HOST_ASFLAGS" \
+        --windres-flags="$HOST_WINDRES_FLAGS"
+}
+
+
+# Setup the build directory, i.e. a directory where all intermediate
+# files will be placed.
+#
+# $1: Build directory. Required.
+#
+# NOTE: this function takes a single parameter. Whether existing files
+# are removed is controlled by the --force option (_BH_OPTION_FORCE),
+# and the compilation mode (release/debug) by the --no-strip option
+# (_BH_OPTION_NO_STRIP); both are registered by bh_register_options.
+#
+bh_setup_build_dir ()
+{
+    BH_BUILD_DIR="$1"
+    if [ -z "$BH_BUILD_DIR" ]; then
+        panic "bh_setup_build_dir received no build directory"
+    fi
+    mkdir -p "$BH_BUILD_DIR"
+    fail_panic "Could not create build directory: $BH_BUILD_DIR"
+
+    # --force: start over from a clean build directory.
+    if [ "$_BH_OPTION_FORCE" ]; then
+        rm -rf "$BH_BUILD_DIR"/*
+    fi
+
+    if [ "$_BH_OPTION_NO_STRIP" ]; then
+        BH_BUILD_MODE=debug
+    else
+        BH_BUILD_MODE=release
+    fi
+
+    # The directory that will contain our toolchain wrappers
+    BH_WRAPPERS_DIR=$BH_BUILD_DIR/toolchain-wrappers
+    rm -rf "$BH_WRAPPERS_DIR" && mkdir "$BH_WRAPPERS_DIR"
+    fail_panic "Could not create wrappers dir: $BH_WRAPPERS_DIR"
+
+    # The directory that will contain our timestamps
+    BH_STAMPS_DIR=$BH_BUILD_DIR/timestamps
+    mkdir -p "$BH_STAMPS_DIR"
+    fail_panic "Could not create timestamps dir"
+}
+
+# Call this before anything else to setup a few important variables that are
+# used consistently to build any host-specific binaries.
+#
+# $1: Host system name (e.g. linux-x86), this is the name of the host system
+#     where the generated GCC binaries will run, not the current machine's
+#     type (this one is in $ORIGINAL_HOST_TAG instead).
+#
+# The (potentially expensive) toolchain selection is performed only the
+# first time this is called for a given host tag; the result is cached in
+# _BH_HOST_<tag>_CONFIG / _BH_HOST_<tag>_SETUP global variables.
+bh_setup_build_for_host ()
+{
+    local HOST_VARNAME=$(dashes_to_underscores $1)
+    local HOST_VAR=_BH_HOST_${HOST_VARNAME}
+
+    # Determine the host configuration triplet in $HOST
+    bh_set_host_tag $1
+
+    # Note: since _bh_select_toolchain_for_host can change the value of
+    # $BH_HOST_CONFIG, we need to save it in a variable to later get the
+    # correct one when this function is called again.
+    if [ -z "$(var_value ${HOST_VAR}_SETUP)" ]; then
+        _bh_select_toolchain_for_host $1
+        var_assign ${HOST_VAR}_CONFIG $BH_HOST_CONFIG
+        var_assign ${HOST_VAR}_SETUP true
+    else
+        BH_HOST_CONFIG=$(var_value ${HOST_VAR}_CONFIG)
+    fi
+}
+
+# This function is used to setup the build environment whenever we
+# generate host-specific binaries. You should call it before invoking
+# a configure script or make.
+#
+# It assumes that bh_setup_build_for_host was called with the right
+# host system tag and wrappers directory.
+#
+# Exports CC/CXX/LD/AR/AS/RANLIB/NM/STRIP/STRINGS pointing at the
+# $BH_HOST_CONFIG-prefixed wrapper scripts, sets CFLAGS/CXXFLAGS/LDFLAGS
+# according to $BH_BUILD_MODE, and prepends $BH_WRAPPERS_DIR to PATH so
+# the wrappers are found first.
+bh_setup_host_env ()
+{
+    CC=$BH_HOST_CONFIG-gcc
+    CXX=$BH_HOST_CONFIG-g++
+    LD=$BH_HOST_CONFIG-ld
+    AR=$BH_HOST_CONFIG-ar
+    AS=$BH_HOST_CONFIG-as
+    RANLIB=$BH_HOST_CONFIG-ranlib
+    NM=$BH_HOST_CONFIG-nm
+    STRIP=$BH_HOST_CONFIG-strip
+    STRINGS=$BH_HOST_CONFIG-strings
+    export CC CXX LD AR AS RANLIB NM STRIP STRINGS
+
+    CFLAGS=
+    CXXFLAGS=
+    LDFLAGS=
+    case $BH_BUILD_MODE in
+        release)
+            CFLAGS="-O2 -Os -fomit-frame-pointer -s"
+            CXXFLAGS=$CFLAGS
+            ;;
+        debug)
+            CFLAGS="-O0 -g"
+            CXXFLAGS=$CFLAGS
+            ;;
+    esac
+    export CFLAGS CXXFLAGS LDFLAGS
+
+    # Make the wrapper toolchain visible before anything else on PATH.
+    export PATH=$BH_WRAPPERS_DIR:$PATH
+}
+
+# Option callback for --no-color: disable colored console output.
+_bh_option_no_color ()
+{
+    bh_set_color_mode off
+}
+
+# This function is used to register a few command-line options that
+# impact the build of host binaries. Call it before invoking
+# extract_parameters to add them automatically.
+#
+bh_register_options ()
+{
+    BH_HOST_SYSTEMS="$BH_BUILD_TAG"
+    register_var_option "--systems=<list>" BH_HOST_SYSTEMS "Build binaries that run on these systems."
+
+    _BH_OPTION_FORCE=
+    register_var_option "--force" _BH_OPTION_FORCE "Force rebuild."
+
+    _BH_OPTION_NO_STRIP=
+    register_var_option "--no-strip" _BH_OPTION_NO_STRIP "Don't strip generated binaries."
+
+    register_option "--no-color" _bh_option_no_color "Don't output colored text."
+
+    if [ "$HOST_OS" = darwin ]; then
+        DARWIN_SDK_VERSION=
+        # FIX: store the option's value in DARWIN_SDK_VERSION (previously
+        # DARWIN_SDK), which is the variable that is initialized above and
+        # tested by _bh_select_toolchain_for_host. With the old name the
+        # --darwin-sdk-version flag had no effect.
+        register_var_option "--darwin-sdk-version=<version>" DARWIN_SDK_VERSION "Select Darwin SDK version."
+
+        DARWIN_MIN_VERSION=
+        register_var_option "--darwin-min-version=<version>" DARWIN_MIN_VERSION "Select minimum OS X version of generated host toolchains."
+    fi
+}
+
+# Execute a given command.
+#
+# NOTE: The command is run in its own sub-shell to avoid environment
+#        contamination.
+#
+# Panics (via fail_panic) when the command exits with a non-zero status.
+#
+# $@: command
+bh_do ()
+{
+    ("$@")
+    fail_panic
+}
+
+# Return the build install directory of a given Python version
+#
+# $1: host system tag
+# $2: python version (NOTE: currently unused; the install directory is
+#     shared per host tag)
+#
+# The suffix of this path must match python_ndk_install_dir: packages are
+# created from the build folder by stripping the trailing part of
+# python_build_install_dir that matches python_ndk_install_dir.
+python_build_install_dir ()
+{
+    echo "$BH_BUILD_DIR/$1/install/host-tools"
+}
+
+# Same as python_build_install_dir, but for the final NDK installation
+# directory. Relative to $NDK_DIR.
+#
+# $1: host system tag (currently unused; the location is fixed)
+python_ndk_install_dir ()
+{
+    echo "host-tools"
+}
diff --git a/build/tools/dev-defaults.sh b/build/tools/dev-defaults.sh
new file mode 100644
index 0000000..97f357f
--- /dev/null
+++ b/build/tools/dev-defaults.sh
@@ -0,0 +1,274 @@
+# Default values used by several dev-scripts.
+#
+
+# This script is imported while building the NDK, while running the tests, and
+# when running make-standalone-toolchain.sh. Check if we have our own platforms
+# tree (as we would in an installed NDK) first, and fall back to prebuilts/ndk.
+PLATFORMS_DIR=$ANDROID_NDK_ROOT/platforms
+if [ ! -d "$PLATFORMS_DIR" ]; then
+    PLATFORMS_DIR=$ANDROID_NDK_ROOT/../prebuilts/ndk/current/platforms
+fi
+# Numeric API levels, derived from the android-<N> directory names.
+API_LEVELS=$(ls $PLATFORMS_DIR | sed 's/android-//' | sort -n)
+
+# The latest API level is the last one in the list.
+LATEST_API_LEVEL=$(echo $API_LEVELS | awk '{ print $NF }')
+
+FIRST_API64_LEVEL=21
+
+# Default ABIs for the target prebuilt binaries.
+PREBUILT_ABIS="armeabi armeabi-v7a x86 mips arm64-v8a x86_64 mips64"
+
+# Location of the STLport sources, relative to the NDK root directory
+STLPORT_SUBDIR=sources/cxx-stl/stlport
+
+# Location of the GAbi++ sources, relative to the NDK root directory
+GABIXX_SUBDIR=sources/cxx-stl/gabi++
+
+# Location of the GNU libstdc++ headers and libraries, relative to the NDK
+# root directory.
+GNUSTL_SUBDIR=sources/cxx-stl/gnu-libstdc++
+
+# Location of the LLVM libc++ headers and libraries, relative to the NDK
+# root directory.
+LIBCXX_SUBDIR=sources/cxx-stl/llvm-libc++
+
+# Location of the LLVM libc++abi headers, relative to the NDK root directory.
+LIBCXXABI_SUBDIR=sources/cxx-stl/llvm-libc++abi/libcxxabi
+
+# Location of the gccunwind sources, relative to the NDK root directory
+GCCUNWIND_SUBDIR=sources/android/gccunwind
+
+# Location of the support sources for libc++, relative to the NDK root directory
+SUPPORT_SUBDIR=sources/android/support
+
+# The date to use when downloading toolchain sources from AOSP servers
+# Leave it empty for tip of tree.
+TOOLCHAIN_GIT_DATE=now
+
+# The space-separated list of all GCC versions we support in this NDK
+DEFAULT_GCC_VERSION_LIST="4.9"
+
+DEFAULT_GCC32_VERSION=4.9
+DEFAULT_GCC64_VERSION=4.9
+FIRST_GCC32_VERSION=4.9
+FIRST_GCC64_VERSION=4.9
+DEFAULT_LLVM_GCC32_VERSION=4.9
+DEFAULT_LLVM_GCC64_VERSION=4.9
+
+DEFAULT_BINUTILS_VERSION=2.27
+DEFAULT_MPFR_VERSION=3.1.1
+DEFAULT_GMP_VERSION=5.0.5
+DEFAULT_MPC_VERSION=1.0.1
+DEFAULT_CLOOG_VERSION=0.18.0
+DEFAULT_ISL_VERSION=0.11.1
+DEFAULT_PPL_VERSION=1.0
+DEFAULT_PYTHON_VERSION=2.7.5
+DEFAULT_PERL_VERSION=5.16.2
+
+# The list of default CPU architectures we support
+DEFAULT_ARCHS="arm x86 mips arm64 x86_64 mips64"
+
+# Default toolchain names and prefix
+#
+# This is used by get_default_toolchain_name_for_arch and get_default_toolchain_prefix_for_arch
+# defined below
+DEFAULT_ARCH_TOOLCHAIN_NAME_arm=arm-linux-androideabi
+DEFAULT_ARCH_TOOLCHAIN_PREFIX_arm=arm-linux-androideabi
+
+DEFAULT_ARCH_TOOLCHAIN_NAME_arm64=aarch64-linux-android
+DEFAULT_ARCH_TOOLCHAIN_PREFIX_arm64=aarch64-linux-android
+
+DEFAULT_ARCH_TOOLCHAIN_NAME_x86=x86
+DEFAULT_ARCH_TOOLCHAIN_PREFIX_x86=i686-linux-android
+
+DEFAULT_ARCH_TOOLCHAIN_NAME_x86_64=x86_64
+DEFAULT_ARCH_TOOLCHAIN_PREFIX_x86_64=x86_64-linux-android
+
+DEFAULT_ARCH_TOOLCHAIN_NAME_mips=mips64el-linux-android
+DEFAULT_ARCH_TOOLCHAIN_PREFIX_mips=mips64el-linux-android
+
+DEFAULT_ARCH_TOOLCHAIN_NAME_mips64=mips64el-linux-android
+DEFAULT_ARCH_TOOLCHAIN_PREFIX_mips64=mips64el-linux-android
+
+# The build number of clang used to build pieces of the NDK (like platforms).
+DEFAULT_LLVM_VERSION="2455903"
+
+# The default URL to download the LLVM tar archive
+DEFAULT_LLVM_URL="http://llvm.org/releases"
+
+# The list of default host NDK systems we support
+DEFAULT_SYSTEMS="linux-x86 windows darwin-x86"
+
+# The default issue tracker URL
+DEFAULT_ISSUE_TRACKER_URL="http://source.android.com/source/report-bugs.html"
+
+# Return the default gcc version for a given architecture
+# $1: Architecture name (e.g. 'arm')
+# Out: default arch-specific gcc version
+get_default_gcc_version_for_arch ()
+{
+    # All 64-bit architecture names end in "64" (arm64, x86_64, mips64).
+    case $1 in
+        *64)
+            echo "$DEFAULT_GCC64_VERSION"
+            ;;
+        *)
+            echo "$DEFAULT_GCC32_VERSION"
+            ;;
+    esac
+}
+
+# Return the first gcc version for a given architecture
+# $1: Architecture name (e.g. 'arm')
+# Out: default arch-specific gcc version
+get_first_gcc_version_for_arch ()
+{
+    # 64-bit architectures (names ending in "64") may have a different
+    # first supported GCC release.
+    case $1 in
+        *64)
+            echo "$FIRST_GCC64_VERSION"
+            ;;
+        *)
+            echo "$FIRST_GCC32_VERSION"
+            ;;
+    esac
+}
+
+# Return default NDK ABI for a given architecture name
+# $1: Architecture name
+# Out: ABI name
+get_default_abi_for_arch ()
+{
+    local RET
+    case $1 in
+        arm)
+            RET="armeabi"
+            ;;
+        arm64)
+            RET="arm64-v8a"
+            ;;
+        x86|x86_64|mips|mips64)
+            RET="$1"
+            ;;
+        mips32r6)
+            RET="mips"
+            ;;
+        *)
+            2> echo "ERROR: Unsupported architecture name: $1, use one of: arm arm64 x86 x86_64 mips mips64"
+            exit 1
+            ;;
+    esac
+    echo "$RET"
+}
+
+
+# Retrieve the list of default ABIs supported by a given architecture
+# $1: Architecture name
+# Out: space-separated list of ABI names
+get_default_abis_for_arch ()
+{
+    local RET
+    case $1 in
+        arm)
+            RET="armeabi armeabi-v7a"
+            ;;
+        arm64)
+            RET="arm64-v8a"
+            ;;
+        x86|x86_64|mips|mips32r6|mips64)
+            RET="$1"
+            ;;
+        *)
+            2> echo "ERROR: Unsupported architecture name: $1, use one of: arm arm64 x86 x86_64 mips mips64"
+            exit 1
+            ;;
+    esac
+    echo "$RET"
+}
+
+# Return toolchain name for given architecture and GCC version
+# $1: Architecture name (e.g. 'arm')
+# $2: optional, GCC version (e.g. '4.8')
+# Out: default arch-specific toolchain name (e.g. 'arm-linux-androideabi-$GCC_VERSION')
+# Return empty for unknown arch
+get_toolchain_name_for_arch ()
+{
+    # Append "-<version>" only when a version was supplied.
+    local SUFFIX=
+    if [ -n "$2" ]; then
+        SUFFIX="-$2"
+    fi
+    # Indirect lookup of DEFAULT_ARCH_TOOLCHAIN_NAME_<arch>.
+    eval echo \"\${DEFAULT_ARCH_TOOLCHAIN_NAME_$1}$SUFFIX\"
+}
+
+# Return the default toolchain name for a given architecture
+# $1: Architecture name (e.g. 'arm')
+# Out: default arch-specific toolchain name (e.g. 'arm-linux-androideabi-$GCCVER')
+# Return empty for unknown arch
+get_default_toolchain_name_for_arch ()
+{
+    local VERSION
+    VERSION=$(get_default_gcc_version_for_arch $1)
+    # Indirect lookup of DEFAULT_ARCH_TOOLCHAIN_NAME_<arch>.
+    eval echo \"\${DEFAULT_ARCH_TOOLCHAIN_NAME_$1}-$VERSION\"
+}
+
+# Return the default toolchain program prefix for a given architecture
+# $1: Architecture name
+# Out: default arch-specific toolchain prefix (e.g. arm-linux-androideabi)
+# Return empty for unknown arch
+get_default_toolchain_prefix_for_arch ()
+{
+    # Indirect lookup of DEFAULT_ARCH_TOOLCHAIN_PREFIX_<arch>.
+    eval echo "\$DEFAULT_ARCH_TOOLCHAIN_PREFIX_$1"
+}
+
+# Get the list of all toolchain names for a given architecture
+# $1: architecture (e.g. 'arm')
+# $2: comma separated versions (optional)
+# Out: list of toolchain names for this arch (e.g. arm-linux-androideabi-4.8 arm-linux-androideabi-4.9)
+# Return empty for unknown arch
+get_toolchain_name_list_for_arch ()
+{
+    local PREFIX VERSION RET ADD FIRST_GCC_VERSION VERSIONS
+    # Indirect lookup of DEFAULT_ARCH_TOOLCHAIN_NAME_<arch>; empty means
+    # the architecture is unknown.
+    PREFIX=$(eval echo \"\$DEFAULT_ARCH_TOOLCHAIN_NAME_$1\")
+    if [ -z "$PREFIX" ]; then
+        return 0
+    fi
+    RET=""
+    FIRST_GCC_VERSION=$(get_first_gcc_version_for_arch $1)
+    ADD=""
+    VERSIONS=$(commas_to_spaces $2)
+    if [ -z "$VERSIONS" ]; then
+        VERSIONS=$DEFAULT_GCC_VERSION_LIST
+    else
+        ADD="yes" # include everything we passed explicitly
+    fi
+    # When using the default version list, skip entries that come before
+    # $FIRST_GCC_VERSION; ADD flips to "yes" once it is reached.
+    for VERSION in $VERSIONS; do
+        if [ -z "$ADD" -a "$VERSION" = "$FIRST_GCC_VERSION" ]; then
+            ADD="yes"
+        fi
+        if [ -z "$ADD" ]; then
+            continue
+        fi
+        RET=$RET" $PREFIX-$VERSION"
+    done
+    # Strip the leading space accumulated by the loop.
+    RET=${RET## }
+    echo "$RET"
+}
+
+# Return the binutils version to be used by default when
+# building a given version of GCC. This is needed to ensure
+# we use binutils-2.19 when building gcc-4.4.3 for ARM and x86,
+# and later binutils in other cases (mips, or gcc-4.6+).
+#
+# Note that technically, we could use latest binutils for all versions of
+# GCC, however, in NDK r7, we did build GCC 4.4.3 with binutils-2.20.1
+# and this resulted in weird C++ debugging bugs. For NDK r7b and higher,
+# binutils was reverted to 2.19, to ensure at least
+# feature/bug compatibility.
+#
+# NOTE: the version-specific behavior described above is historical; the
+# current implementation always returns $DEFAULT_BINUTILS_VERSION and
+# ignores $1.
+#
+# $1: toolchain with version number (e.g. 'arm-linux-androideabi-4.8')
+#
+get_default_binutils_version_for_gcc ()
+{
+    echo "$DEFAULT_BINUTILS_VERSION"
+}
+
+# Return the binutils version to be used by default when
+# building a given version of llvm. For llvm-3.4 or later,
+# we use binutils-2.23+ to ensure the LLVMgold.so could be
+# built properly. For llvm-3.3, we use binutils-2.21 as default.
+#
+# NOTE: the version-specific behavior described above is historical; the
+# current implementation always returns $DEFAULT_BINUTILS_VERSION and
+# ignores $1.
+#
+# $1: toolchain with version number (e.g. 'llvm-3.3')
+#
+get_default_binutils_version_for_llvm ()
+{
+    echo "$DEFAULT_BINUTILS_VERSION"
+}
diff --git a/build/tools/gen-toolchain-wrapper.sh b/build/tools/gen-toolchain-wrapper.sh
new file mode 100755
index 0000000..d4cc7f3
--- /dev/null
+++ b/build/tools/gen-toolchain-wrapper.sh
@@ -0,0 +1,179 @@
+#!/bin/bash
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#  This shell script generates a set of toolchain wrapper scripts that
+#  forward to another installed toolchain, optionally adding extra flags.
+#
+
+PROGDIR=$(dirname "$0")
+. "$PROGDIR/prebuilt-common.sh"
+
+PROGRAM_PARAMETERS="<dst-dir>"
+PROGRAM_DESCRIPTION="\
+This script allows you to generate a 'wrapper toolchain', i.e. a set of
+simple scripts that act as toolchain binaries (e.g. my-cc, my-c++, my-ld,
+etc...) but call another installed toolchain instead, possibly with additional
+command-line options.
+
+For example, imagine we want a toolchain that generates 32-bit binaries while
+running on a 64-bit system, we could call this script as:
+
+   $PROGNAME --cflags="-m32" --cxxflags="-m32" --ldflags="-m32" /tmp/my-toolchain
+
+Then, this will create programs like:
+
+   /tmp/my-toolchain/my-cc
+   /tmp/my-toolchain/my-gcc
+   /tmp/my-toolchain/my-c++
+   /tmp/my-toolchain/my-g++
+   /tmp/my-toolchain/my-ld
+   ...
+
+Where the compilers and linkers will add the -m32 flag to the command-line before
+calling the host version of 'cc', 'gcc', etc...
+
+Generally speaking:
+
+  - The 'destination toolchain' is the one that will be called by the
+    generated wrapper script. It is identified by a 'destination prefix'
+    (e.g. 'x86_64-linux-gnu-', note the dash at the end).
+
+    It is empty by default, but can be changed with --dst-prefix=<prefix>
+
+  - The 'source prefix' is the prefix added to the generated toolchain scripts,
+    it is 'my-' by default, but can be changed with --src-prefix=<prefix>
+
+  - You can use --cflags, --cxxflags, --ldflags, etc... to add extra
+    command-line flags for the generated compiler, linker, etc.. scripts
+
+"
+
+# Command-line options. Each register_var_option call (from
+# prebuilt-common.sh) binds a flag to a variable; extract_parameters
+# below performs the actual parsing.
+DEFAULT_SRC_PREFIX="my-"
+DEFAULT_DST_PREFIX=""
+
+SRC_PREFIX=$DEFAULT_SRC_PREFIX
+register_var_option "--src-prefix=<prefix>" SRC_PREFIX "Set source toolchain prefix"
+
+DST_PREFIX=$DEFAULT_DST_PREFIX
+register_var_option "--dst-prefix=<prefix>" DST_PREFIX "Set destination toolchain prefix"
+
+EXTRA_CFLAGS=
+register_var_option "--cflags=<options>" EXTRA_CFLAGS "Add extra C compiler flags"
+
+EXTRA_CXXFLAGS=
+register_var_option "--cxxflags=<options>" EXTRA_CXXFLAGS "Add extra C++ compiler flags"
+
+EXTRA_LDFLAGS=
+register_var_option "--ldflags=<options>" EXTRA_LDFLAGS "Add extra linker flags"
+
+EXTRA_ASFLAGS=
+register_var_option "--asflags=<options>" EXTRA_ASFLAGS "Add extra assembler flags"
+
+EXTRA_ARFLAGS=
+register_var_option "--arflags=<options>" EXTRA_ARFLAGS "Add extra archiver flags"
+
+EXTRA_WINDRES_FLAGS=
+register_var_option "--windres-flags=<options>" EXTRA_WINDRES_FLAGS "Add extra windres flags"
+
+PROGRAMS="cc gcc c++ g++ cpp as ld ar ranlib strip strings nm objdump dlltool windres"
+register_var_option "--programs=<list>" PROGRAMS "List of programs to generate wrapper for"
+
+extract_parameters "$@"
+
+# Validate the parsed options and the positional <dst-dir> parameter.
+PROGRAMS=$(commas_to_spaces "$PROGRAMS")
+if [ -z "$PROGRAMS" ]; then
+    panic "Empty program list, nothing to do!"
+fi
+
+DST_DIR="$PARAMETERS"
+if [ -z "$DST_DIR" ]; then
+    panic "Please provide a destination directory as a parameter! See --help for details."
+fi
+
+mkdir -p "$DST_DIR"
+fail_panic "Could not create destination directory: $DST_DIR"
+
+# Check if mingw compiler has dlfcn.h
+# $1: mingw compiler
+#
+# Returns success (0) only when $1 exists, identifies itself as MinGW,
+# and can compile a trivial unit that includes <dlfcn.h>.
+mingw_has_dlfcn_h ()
+{
+   local CC="$1"
+
+   if [ ! -f "$CC" ]; then
+       # compiler not found
+       return 1
+   fi
+   "$CC" -xc /dev/null -dM -E | grep -q MINGW
+   if [ $? != 0 ]; then
+       # not a mingw compiler
+       return 1
+   fi
+
+   # The exit status of this compile attempt is the function's result.
+   "$CC" -xc -c /dev/null -include dlfcn.h -o /dev/null > /dev/null 2>&1
+}
+
+# Generate a small wrapper program
+#
+# $1: program name, without any prefix (e.g. gcc, g++, ar, etc..)
+# $2: source prefix (e.g. 'i586-mingw32msvc-')
+# $3: destination prefix (e.g. 'i586-px-mingw32msvc-')
+# $4: destination directory for the generated program
+#
+gen_wrapper_program ()
+{
+    local PROG="$1"
+    local SRC_PREFIX="$2"
+    local DST_PREFIX="$3"
+    local DST_FILE="$4/${SRC_PREFIX}$PROG"
+    local FLAGS=""
+    local LDFLAGS=""
+
+    # Pick per-tool extra flags. For compilers targeting MinGW that ship
+    # dlfcn.h (dlfcn-win32), also link against -ldl.
+    case $PROG in
+      cc|gcc|cpp)
+          FLAGS=$FLAGS" $EXTRA_CFLAGS"
+          if mingw_has_dlfcn_h ${DST_PREFIX}$PROG; then
+              LDFLAGS="-ldl"
+          fi
+          ;;
+      c++|g++)
+          FLAGS=$FLAGS" $EXTRA_CXXFLAGS"
+          if mingw_has_dlfcn_h ${DST_PREFIX}$PROG; then
+              LDFLAGS="-ldl"
+          fi
+          ;;
+      ar) FLAGS=$FLAGS" $EXTRA_ARFLAGS";;
+      as) FLAGS=$FLAGS" $EXTRA_ASFLAGS";;
+      ld|ld.bfd|ld.gold) FLAGS=$FLAGS" $EXTRA_LDFLAGS";;
+      windres) FLAGS=$FLAGS" $EXTRA_WINDRES_FLAGS";;
+    esac
+
+    # The wrapper forwards to the destination tool, inserting FLAGS before
+    # the caller's arguments and LDFLAGS after them.
+    cat > "$DST_FILE" << EOF
+#!/bin/sh
+# Auto-generated, do not edit
+${DST_PREFIX}$PROG $FLAGS "\$@" $LDFLAGS
+EOF
+    chmod +x "$DST_FILE"
+    log "Generating: ${SRC_PREFIX}$PROG"
+}
+
+log "Generating toolchain wrappers in: $DST_DIR"
+
+# Emit one wrapper script per requested program.
+for PROG in $PROGRAMS; do
+  gen_wrapper_program $PROG "$SRC_PREFIX" "$DST_PREFIX" "$DST_DIR"
+done
+
+log "Done!"
diff --git a/build/tools/make-standalone-toolchain.sh b/build/tools/make-standalone-toolchain.sh
new file mode 100755
index 0000000..3eface7
--- /dev/null
+++ b/build/tools/make-standalone-toolchain.sh
@@ -0,0 +1,164 @@
+#!/bin/bash
+# Copyright (C) 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Create a standalone toolchain package for Android.
+
+. `dirname $0`/prebuilt-common.sh
+
+PROGRAM_PARAMETERS=""
+PROGRAM_DESCRIPTION=\
+"Generate a customized Android toolchain installation that includes
+a working sysroot. The result is something that can more easily be
+used as a standalone cross-compiler, e.g. to run configure and
+make scripts."
+
+TOOLCHAIN_NAME=
+register_var_option "--toolchain=<name>" TOOLCHAIN_NAME "Specify toolchain name"
+
+# --use-llvm is accepted for backwards compatibility but does nothing.
+do_option_use_llvm () {
+  true;
+}
+register_option "--use-llvm" do_option_use_llvm "No-op. Clang is always available."
+
+STL=gnustl
+register_var_option "--stl=<name>" STL "Specify C++ STL"
+
+ARCH=
+register_var_option "--arch=<name>" ARCH "Specify target architecture"
+
+# Grab the ABIs that match the architecture.
+ABIS=
+register_var_option "--abis=<list>" ABIS "No-op. Derived from --arch or --toolchain."
+
+NDK_DIR=
+register_var_option "--ndk-dir=<path>" NDK_DIR "Unsupported."
+
+PACKAGE_DIR=$TMPDIR
+register_var_option "--package-dir=<path>" PACKAGE_DIR "Place package file in <path>"
+
+INSTALL_DIR=
+register_var_option "--install-dir=<path>" INSTALL_DIR "Don't create package, install files to <path> instead."
+
+DRYRUN=
+register_var_option "--dryrun" DRYRUN "Unsupported."
+
+PLATFORM=
+register_option "--platform=<name>" do_platform "Specify target Android platform/API level." "android-14"
+# Callback invoked by extract_parameters when --platform=<name> is parsed.
+do_platform () {
+    PLATFORM=$1;
+    # android-L was a preview name; map it to its final API level.
+    if [ "$PLATFORM" = "android-L" ]; then
+        echo "WARNING: android-L is renamed as android-21"
+        PLATFORM=android-21
+    fi
+}
+
+FORCE=
+# Callback invoked by extract_parameters when --force is parsed.
+do_force () {
+    FORCE=true
+}
+register_option "--force" do_force "Remove existing install directory."
+
+extract_parameters "$@"
+
+# Reject options that this wrapper no longer supports.
+if [ -n "$NDK_DIR" ]; then
+    dump "The --ndk-dir argument is no longer supported."
+    exit 1
+fi
+
+if [ -n "$DRYRUN" ]; then
+    dump "--dryrun is not supported."
+    exit 1
+fi
+
+# Check TOOLCHAIN_NAME: derive the architecture from the toolchain-name
+# prefix when --toolchain was given.
+ARCH_BY_TOOLCHAIN_NAME=
+if [ -n "$TOOLCHAIN_NAME" ]; then
+    case $TOOLCHAIN_NAME in
+        arm-*)
+            ARCH_BY_TOOLCHAIN_NAME=arm
+            ;;
+        x86-*)
+            ARCH_BY_TOOLCHAIN_NAME=x86
+            ;;
+        mipsel-*)
+            ARCH_BY_TOOLCHAIN_NAME=mips
+            ;;
+        aarch64-*)
+            ARCH_BY_TOOLCHAIN_NAME=arm64
+            ;;
+        x86_64-linux-android-*)
+            ARCH_BY_TOOLCHAIN_NAME=x86_64
+            # Drop the "-linux-android" infix to match the expected
+            # x86_64-<version> toolchain naming.
+            TOOLCHAIN_NAME=$(echo "$TOOLCHAIN_NAME" | sed -e 's/-linux-android//')
+            echo "Auto-truncate: --toolchain=$TOOLCHAIN_NAME"
+            ;;
+        x86_64-*)
+            ARCH_BY_TOOLCHAIN_NAME=x86_64
+            ;;
+        mips64el-*)
+            ARCH_BY_TOOLCHAIN_NAME=mips64
+            ;;
+        *)
+            echo "Invalid toolchain $TOOLCHAIN_NAME"
+            exit 1
+            ;;
+    esac
+fi
+# Check ARCH: fall back to the toolchain-derived arch, then to arm.
+if [ -z "$ARCH" ]; then
+    ARCH=$ARCH_BY_TOOLCHAIN_NAME
+    if [ -z "$ARCH" ]; then
+        ARCH=arm
+    fi
+    echo "Auto-config: --arch=$ARCH"
+fi
+
+# Install or Package: --install-dir takes precedence over --package-dir.
+FORCE_ARG=
+if [ -n "$INSTALL_DIR" ] ; then
+    INSTALL_ARG="--install-dir=$INSTALL_DIR"
+    INSTALL_LOCATION=$INSTALL_DIR
+    if [ "$FORCE" = "true" ]; then
+        FORCE_ARG="--force"
+    else
+        if [ -e "$INSTALL_DIR" ]; then
+            dump "Refusing to clobber existing install directory: $INSTALL_DIR.
+
+make-standalone-toolchain.sh used to install a new toolchain into an existing
+directory. This is not desirable, as it will not clean up any stale files. If
+you wish to remove the install directory before creation, pass --force."
+            exit 1
+        fi
+    fi
+else
+    INSTALL_ARG="--package-dir=$PACKAGE_DIR"
+fi
+
+# Strip the "android-" prefix; when --platform was not given this is empty
+# and no --api flag is passed to the python tool.
+PLATFORM_NUMBER=${PLATFORM#android-}
+if [ -n "$PLATFORM_NUMBER" ]; then
+  PLATFORM_ARG="--api $PLATFORM_NUMBER"
+else
+  PLATFORM_ARG=""
+fi
+
+# Delegate the real work to make_standalone_toolchain.py.
+run python `dirname $0`/make_standalone_toolchain.py \
+    --arch $ARCH $PLATFORM_ARG --stl $STL $INSTALL_ARG $FORCE_ARG
+fail_panic "Failed to create toolchain."
+
+if [ -n "$INSTALL_DIR" ]; then
+    dump "Toolchain installed to $INSTALL_DIR."
+else
+    dump "Package installed to $PACKAGE_DIR."
+fi
diff --git a/build/tools/make_standalone_toolchain.py b/build/tools/make_standalone_toolchain.py
index 22951ae..b6b8607 100755
--- a/build/tools/make_standalone_toolchain.py
+++ b/build/tools/make_standalone_toolchain.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2016 The Android Open Source Project
 #
@@ -16,15 +16,13 @@
 #
 """Creates a toolchain installation for a given Android target.
 
-THIS TOOL IS OBSOLETE. It is no longer necessary to create a separate toolchain for use
-with build systems that lack explicit NDK support. The compiler installed to
-<NDK>/toolchains/llvm/prebuilt/<host>/bin can be used directly. See
-https://developer.android.com/ndk/guides/other_build_systems for more details.
+The output of this tool is a more typical cross-compiling toolchain. It is
+intended to be used with existing build systems such as autotools.
 """
 import argparse
 import atexit
+from distutils.dir_util import copy_tree
 import inspect
-import json
 import logging
 import os
 import shutil
@@ -32,10 +30,10 @@
 import sys
 import tempfile
 import textwrap
-from pathlib import Path
+
 
 THIS_DIR = os.path.realpath(os.path.dirname(__file__))
-NDK_DIR = os.path.realpath(os.path.join(THIS_DIR, "../.."))
+NDK_DIR = os.path.realpath(os.path.join(THIS_DIR, '../..'))
 
 
 def logger():
@@ -44,67 +42,56 @@
 
 
 def check_ndk_or_die():
-    """Verify that our NDK installation is somewhat present or die."""
+    """Verify that our NDK installation is sane or die."""
     checks = [
-        "build/core",
-        "prebuilt",
-        "toolchains",
+        'build/core',
+        'prebuilt',
+        'toolchains',
     ]
 
     for check in checks:
         check_path = os.path.join(NDK_DIR, check)
         if not os.path.exists(check_path):
-            sys.exit("Missing {}".format(check_path))
+            sys.exit('Failed sanity check: missing {}'.format(check_path))
 
 
 def get_triple(arch):
     """Return the triple for the given architecture."""
     return {
-        "arm": "arm-linux-androideabi",
-        "arm64": "aarch64-linux-android",
-        "riscv64": "riscv64-linux-android",
-        "x86": "i686-linux-android",
-        "x86_64": "x86_64-linux-android",
-    }[arch]
-
-
-def arch_to_abi(arch: str) -> str:
-    """Return the ABI name for the given architecture."""
-    return {
-        "arm": "armeabi-v7a",
-        "arm64": "arm64-v8a",
-        "riscv64": "riscv64",
-        "x86": "x86",
-        "x86_64": "x86_64",
+        'arm': 'arm-linux-androideabi',
+        'arm64': 'aarch64-linux-android',
+        'x86': 'i686-linux-android',
+        'x86_64': 'x86_64-linux-android',
     }[arch]
 
 
 def get_host_tag_or_die():
     """Return the host tag for this platform. Die if not supported."""
-    if sys.platform.startswith("linux"):
-        return "linux-x86_64"
-    elif sys.platform == "darwin":
-        return "darwin-x86_64"
-    elif sys.platform == "win32" or sys.platform == "cygwin":
-        return "windows-x86_64"
-    sys.exit("Unsupported platform: " + sys.platform)
+    if sys.platform.startswith('linux'):
+        return 'linux-x86_64'
+    elif sys.platform == 'darwin':
+        return 'darwin-x86_64'
+    elif sys.platform == 'win32' or sys.platform == 'cygwin':
+        return 'windows-x86_64'
+    sys.exit('Unsupported platform: ' + sys.platform)
 
 
 def get_toolchain_path_or_die(host_tag):
     """Return the toolchain path or die."""
-    toolchain_path = os.path.join(NDK_DIR, "toolchains/llvm/prebuilt", host_tag)
+    toolchain_path = os.path.join(NDK_DIR, 'toolchains/llvm/prebuilt',
+                                  host_tag)
     if not os.path.exists(toolchain_path):
-        sys.exit("Could not find toolchain: {}".format(toolchain_path))
+        sys.exit('Could not find toolchain: {}'.format(toolchain_path))
     return toolchain_path
 
 
 def make_clang_target(triple, api):
     """Returns the Clang target for the given GNU triple and API combo."""
-    arch, os_name, env = triple.split("-")
-    if arch == "arm":
-        arch = "armv7a"  # Target armv7, not armv5.
+    arch, os_name, env = triple.split('-')
+    if arch == 'arm':
+        arch = 'armv7a'  # Target armv7, not armv5.
 
-    return "{}-{}-{}{}".format(arch, os_name, env, api)
+    return '{}-{}-{}{}'.format(arch, os_name, env, api)
 
 
 def make_clang_scripts(install_dir, arch, api, windows):
@@ -119,110 +106,87 @@
     Create wrapper scripts that invoke Clang with `-target` and `--sysroot`
     preset.
     """
-    with open(os.path.join(install_dir, "AndroidVersion.txt")) as version_file:
-        first_line = version_file.read().strip().splitlines()[0]
-        major, minor, _build = first_line.split(".")
+    with open(os.path.join(install_dir, 'AndroidVersion.txt')) as version_file:
+        major, minor, _build = version_file.read().strip().split('.')
 
     version_number = major + minor
 
-    exe = ""
+    exe = ''
     if windows:
-        exe = ".exe"
+        exe = '.exe'
 
-    bin_dir = os.path.join(install_dir, "bin")
-    shutil.move(
-        os.path.join(bin_dir, "clang" + exe),
-        os.path.join(bin_dir, "clang{}".format(version_number) + exe),
-    )
-    shutil.move(
-        os.path.join(bin_dir, "clang++" + exe),
-        os.path.join(bin_dir, "clang{}++".format(version_number) + exe),
-    )
+    bin_dir = os.path.join(install_dir, 'bin')
+    shutil.move(os.path.join(bin_dir, 'clang' + exe),
+                os.path.join(bin_dir, 'clang{}'.format(version_number) + exe))
+    shutil.move(os.path.join(bin_dir, 'clang++' + exe),
+                os.path.join(bin_dir, 'clang{}++'.format(
+                    version_number) + exe))
 
     triple = get_triple(arch)
     target = make_clang_target(triple, api)
-    flags = "-target {}".format(target)
+    flags = '-target {}'.format(target)
 
     # We only need mstackrealign to fix issues on 32-bit x86 pre-24. After 24,
     # this consumes an extra register unnecessarily, which can cause issues for
     # inline asm.
     # https://github.com/android-ndk/ndk/issues/693
-    if arch == "i686" and api < 24:
-        flags += " -mstackrealign"
+    if arch == 'i686' and api < 24:
+        flags += ' -mstackrealign'
 
     cxx_flags = str(flags)
 
-    clang_path = os.path.join(install_dir, "bin/clang")
-    with open(clang_path, "w") as clang:
-        clang.write(
-            textwrap.dedent(
-                """\
-            #!/usr/bin/env bash
-            bin_dir=`dirname "$0"`
+    clang_path = os.path.join(install_dir, 'bin/clang')
+    with open(clang_path, 'w') as clang:
+        clang.write(textwrap.dedent("""\
+            #!/bin/bash
             if [ "$1" != "-cc1" ]; then
-                "$bin_dir/clang{version}" {flags} "$@"
+                `dirname $0`/clang{version} {flags} "$@"
             else
                 # target/triple already spelled out.
-                "$bin_dir/clang{version}" "$@"
+                `dirname $0`/clang{version} "$@"
             fi
-        """.format(
-                    version=version_number, flags=flags
-                )
-            )
-        )
+        """.format(version=version_number, flags=flags)))
 
     mode = os.stat(clang_path).st_mode
     os.chmod(clang_path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
 
-    clangpp_path = os.path.join(install_dir, "bin/clang++")
-    with open(clangpp_path, "w") as clangpp:
-        clangpp.write(
-            textwrap.dedent(
-                """\
-            #!/usr/bin/env bash
-            bin_dir=`dirname "$0"`
+    clangpp_path = os.path.join(install_dir, 'bin/clang++')
+    with open(clangpp_path, 'w') as clangpp:
+        clangpp.write(textwrap.dedent("""\
+            #!/bin/bash
             if [ "$1" != "-cc1" ]; then
-                "$bin_dir/clang{version}++" {flags} "$@"
+                `dirname $0`/clang{version}++ {flags} "$@"
             else
                 # target/triple already spelled out.
-                "$bin_dir/clang{version}++" "$@"
+                `dirname $0`/clang{version}++ "$@"
             fi
-        """.format(
-                    version=version_number, flags=cxx_flags
-                )
-            )
-        )
+        """.format(version=version_number, flags=cxx_flags)))
 
     mode = os.stat(clangpp_path).st_mode
     os.chmod(clangpp_path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
 
-    shutil.copy2(
-        os.path.join(install_dir, "bin/clang"),
-        os.path.join(install_dir, "bin", triple + "-clang"),
-    )
-    shutil.copy2(
-        os.path.join(install_dir, "bin/clang++"),
-        os.path.join(install_dir, "bin", triple + "-clang++"),
-    )
+    shutil.copy2(os.path.join(install_dir, 'bin/clang'),
+                 os.path.join(install_dir, 'bin', triple + '-clang'))
+    shutil.copy2(os.path.join(install_dir, 'bin/clang++'),
+                 os.path.join(install_dir, 'bin', triple + '-clang++'))
 
     if windows:
-        for pp_suffix in ("", "++"):
-            is_cpp = pp_suffix == "++"
-            exe_name = "clang{}{}.exe".format(version_number, pp_suffix)
-            clangbat_text = textwrap.dedent(
-                """\
+        for pp_suffix in ('', '++'):
+            is_cpp = pp_suffix == '++'
+            exe_name = 'clang{}{}.exe'.format(version_number, pp_suffix)
+            clangbat_text = textwrap.dedent("""\
                 @echo off
                 setlocal
                 call :find_bin
                 if "%1" == "-cc1" goto :L
 
-                set "_BIN_DIR=" && "%_BIN_DIR%{exe}" {flags} %*
+                set "_BIN_DIR=" && %_BIN_DIR%{exe} {flags} %*
                 if ERRORLEVEL 1 exit /b 1
                 goto :done
 
                 :L
                 rem target/triple already spelled out.
-                set "_BIN_DIR=" && "%_BIN_DIR%{exe}" %*
+                set "_BIN_DIR=" && %_BIN_DIR%{exe} %*
                 if ERRORLEVEL 1 exit /b 1
                 goto :done
 
@@ -233,169 +197,124 @@
                 exit /b
 
                 :done
-            """.format(
-                    exe=exe_name, flags=cxx_flags if is_cpp else flags
-                )
-            )
+            """.format(exe=exe_name, flags=cxx_flags if is_cpp else flags))
 
-            for triple_prefix in ("", triple + "-"):
+            for triple_prefix in ('', triple + '-'):
                 clangbat_path = os.path.join(
-                    install_dir, "bin", "{}clang{}.cmd".format(triple_prefix, pp_suffix)
-                )
-                with open(clangbat_path, "w") as clangbat:
+                    install_dir, 'bin',
+                    '{}clang{}.cmd'.format(triple_prefix, pp_suffix))
+                with open(clangbat_path, 'w') as clangbat:
                     clangbat.write(clangbat_text)
 
 
 def replace_gcc_wrappers(install_path, triple, is_windows):
-    cmd = ".cmd" if is_windows else ""
+    cmd = '.cmd' if is_windows else ''
 
-    gcc = os.path.join(install_path, "bin", triple + "-gcc" + cmd)
-    clang = os.path.join(install_path, "bin", "clang" + cmd)
+    gcc = os.path.join(install_path, 'bin', triple + '-gcc' + cmd)
+    clang = os.path.join(install_path, 'bin', 'clang' + cmd)
     shutil.copy2(clang, gcc)
 
-    gpp = os.path.join(install_path, "bin", triple + "-g++" + cmd)
-    clangpp = os.path.join(install_path, "bin", "clang++" + cmd)
+    gpp = os.path.join(install_path, 'bin', triple + '-g++' + cmd)
+    clangpp = os.path.join(install_path, 'bin', 'clang++' + cmd)
     shutil.copy2(clangpp, gpp)
 
 
-def copytree(src, dst):
-    # A Python invocation running concurrently with make_standalone_toolchain.py
-    # can create a __pycache__ directory inside the src dir. Avoid copying it,
-    # because it can be in an inconsistent state.
-    shutil.copytree(
-        src, dst, ignore=shutil.ignore_patterns("__pycache__"), dirs_exist_ok=True
-    )
-
-
 def create_toolchain(install_path, arch, api, toolchain_path, host_tag):
     """Create a standalone toolchain."""
-    copytree(toolchain_path, install_path)
+    copy_tree(toolchain_path, install_path)
     triple = get_triple(arch)
-    make_clang_scripts(install_path, arch, api, host_tag == "windows-x86_64")
-    replace_gcc_wrappers(install_path, triple, host_tag == "windows-x86_64")
+    make_clang_scripts(install_path, arch, api, host_tag == 'windows-x86_64')
+    replace_gcc_wrappers(install_path, triple, host_tag == 'windows-x86_64')
 
-    prebuilt_path = os.path.join(NDK_DIR, "prebuilt", host_tag)
-    copytree(prebuilt_path, install_path)
+    prebuilt_path = os.path.join(NDK_DIR, 'prebuilt', host_tag)
+    copy_tree(prebuilt_path, install_path)
+
+    gdbserver_path = os.path.join(
+        NDK_DIR, 'prebuilt', 'android-' + arch, 'gdbserver')
+    gdbserver_install = os.path.join(install_path, 'share', 'gdbserver')
+    shutil.copytree(gdbserver_path, gdbserver_install)
 
 
 def warn_unnecessary(arch, api, host_tag):
     """Emits a warning that this script is no longer needed."""
-    if host_tag == "windows-x86_64":
-        ndk_var = "%NDK%"
-        prompt = "C:\\>"
+    if host_tag == 'windows-x86_64':
+        ndk_var = '%NDK%'
+        prompt = 'C:\\>'
     else:
-        ndk_var = "$NDK"
-        prompt = "$ "
+        ndk_var = '$NDK'
+        prompt = '$ '
 
     target = make_clang_target(get_triple(arch), api)
-    standalone_toolchain = os.path.join(
-        ndk_var, "build", "tools", "make_standalone_toolchain.py"
-    )
-    toolchain_dir = os.path.join(
-        ndk_var, "toolchains", "llvm", "prebuilt", host_tag, "bin"
-    )
-    old_clang = os.path.join("toolchain", "bin", "clang++")
-    new_clang = os.path.join(toolchain_dir, target + "-clang++")
+    standalone_toolchain = os.path.join(ndk_var, 'build', 'tools',
+                                        'make_standalone_toolchain.py')
+    toolchain_dir = os.path.join(ndk_var, 'toolchains', 'llvm', 'prebuilt',
+                                 host_tag, 'bin')
+    old_clang = os.path.join('toolchain', 'bin', 'clang++')
+    new_clang = os.path.join(toolchain_dir, target + '-clang++')
 
     logger().warning(
-        textwrap.dedent(
-            """\
-            THIS TOOL IS OBSOLETE. The {toolchain_dir} directory contains
-            target-specific scripts that perform the same task. For example,
-            instead of:
+        textwrap.dedent("""\
+        make_standalone_toolchain.py is no longer necessary. The
+        {toolchain_dir} directory contains target-specific scripts that perform
+        the same task. For example, instead of:
 
-                {prompt}python {standalone_toolchain} \\
-                    --arch {arch} --api {api} --install-dir toolchain
-                {prompt}{old_clang} src.cpp
+            {prompt}python {standalone_toolchain} \\
+                --arch {arch} --api {api} --install-dir toolchain
+            {prompt}{old_clang} src.cpp
 
-            Instead use:
+        Instead use:
 
-                {prompt}{new_clang} src.cpp
-
-            See https://developer.android.com/ndk/guides/other_build_systems for more
-            details.
-            """.format(
-                toolchain_dir=toolchain_dir,
-                prompt=prompt,
-                standalone_toolchain=standalone_toolchain,
-                arch=arch,
-                api=api,
-                old_clang=old_clang,
-                new_clang=new_clang,
-            )
-        )
-    )
-
-
-def get_min_supported_api_level(arch: str) -> int:
-    abis_json = Path(NDK_DIR) / "meta/abis.json"
-    with abis_json.open(encoding="utf-8") as abis_file:
-        data = json.load(abis_file)
-    return int(data[arch_to_abi(arch)]["min_os_version"])
+            {prompt}{new_clang} src.cpp
+        """.format(
+            toolchain_dir=toolchain_dir,
+            prompt=prompt,
+            standalone_toolchain=standalone_toolchain,
+            arch=arch,
+            api=api,
+            old_clang=old_clang,
+            new_clang=new_clang)))
 
 
 def parse_args():
     """Parse command line arguments from sys.argv."""
     parser = argparse.ArgumentParser(
-        description=inspect.getdoc(sys.modules[__name__]),
-        # Even when there are invalid arguments, we want to emit the deprecation
-        # warning. We usually wait until after argument parsing to emit that warning so
-        # that we can use the --abi and --api inputs to provide a more complete
-        # replacement example, so to do that in the case of an argument error we need to
-        # catch the error rather than allow it to exit immediately.
-        exit_on_error=False,
-    )
+        description=inspect.getdoc(sys.modules[__name__]))
 
     parser.add_argument(
-        "--arch", required=True, choices=("arm", "arm64", "riscv64", "x86", "x86_64")
-    )
+        '--arch', required=True,
+        choices=('arm', 'arm64', 'x86', 'x86_64'))
     parser.add_argument(
-        "--api", type=int, help='Target the given API version (example: "--api 24").'
-    )
+        '--api', type=int,
+        help='Target the given API version (example: "--api 24").')
     parser.add_argument(
-        "--stl", help="Ignored. Retained for compatibility until NDK r19."
-    )
+        '--stl', help='Ignored. Retained for compatibility until NDK r19.')
 
     parser.add_argument(
-        "--force",
-        action="store_true",
-        help="Remove existing installation directory if it exists.",
-    )
+        '--force', action='store_true',
+        help='Remove existing installation directory if it exists.')
     parser.add_argument(
-        "-v", "--verbose", action="count", help="Increase output verbosity."
-    )
+        '-v', '--verbose', action='count', help='Increase output verbosity.')
 
     def path_arg(arg):
         return os.path.realpath(os.path.expanduser(arg))
 
     output_group = parser.add_mutually_exclusive_group()
     output_group.add_argument(
-        "--package-dir",
-        type=path_arg,
-        default=os.getcwd(),
-        help=(
-            "Build a tarball and install it to the given directory. If "
-            "neither --package-dir nor --install-dir is specified, a "
-            "tarball will be created and installed to the current "
-            "directory."
-        ),
-    )
+        '--package-dir', type=path_arg, default=os.getcwd(),
+        help=('Build a tarball and install it to the given directory. If '
+              'neither --package-dir nor --install-dir is specified, a '
+              'tarball will be created and installed to the current '
+              'directory.'))
     output_group.add_argument(
-        "--install-dir",
-        type=path_arg,
-        help="Install toolchain to the given directory instead of packaging.",
-    )
+        '--install-dir', type=path_arg,
+        help='Install toolchain to the given directory instead of packaging.')
 
     return parser.parse_args()
 
 
 def main():
     """Program entry point."""
-    try:
-        args = parse_args()
-    except argparse.ArgumentError as ex:
-        warn_unnecessary("arm64", "21", get_host_tag_or_die())
-        sys.exit(ex)
+    args = parse_args()
 
     if args.verbose is None:
         logging.basicConfig(level=logging.WARNING)
@@ -410,21 +329,17 @@
 
     check_ndk_or_die()
 
-    min_api = get_min_supported_api_level(args.arch)
+    lp32 = args.arch in ('arm', 'x86')
+    min_api = 16 if lp32 else 21
     api = args.api
     if api is None:
         logger().warning(
-            "Defaulting to target API %d (minimum supported target for %s)",
-            min_api,
-            args.arch,
-        )
+            'Defaulting to target API %d (minimum supported target for %s)',
+            min_api, args.arch)
         api = min_api
     elif api < min_api:
-        sys.exit(
-            "{} is less than minimum platform for {} ({})".format(
-                api, args.arch, min_api
-            )
-        )
+        sys.exit('{} is less than minimum platform for {} ({})'.format(
+            api, args.arch, min_api))
 
     triple = get_triple(args.arch)
     toolchain_path = get_toolchain_path_or_die(host_tag)
@@ -433,10 +348,11 @@
         install_path = args.install_dir
         if os.path.exists(install_path):
             if args.force:
-                logger().info("Cleaning installation directory %s", install_path)
+                logger().info('Cleaning installation directory %s',
+                              install_path)
                 shutil.rmtree(install_path)
             else:
-                sys.exit("Installation directory already exists. Use --force.")
+                sys.exit('Installation directory already exists. Use --force.')
     else:
         tempdir = tempfile.mkdtemp()
         atexit.register(shutil.rmtree, tempdir)
@@ -445,19 +361,17 @@
     create_toolchain(install_path, args.arch, api, toolchain_path, host_tag)
 
     if args.install_dir is None:
-        if host_tag == "windows-x86_64":
-            package_format = "zip"
+        if host_tag == 'windows-x86_64':
+            package_format = 'zip'
         else:
-            package_format = "bztar"
+            package_format = 'bztar'
 
         package_basename = os.path.join(args.package_dir, triple)
         shutil.make_archive(
-            package_basename,
-            package_format,
+            package_basename, package_format,
             root_dir=os.path.dirname(install_path),
-            base_dir=os.path.basename(install_path),
-        )
+            base_dir=os.path.basename(install_path))
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/build/tools/ndk-common.sh b/build/tools/ndk-common.sh
new file mode 100644
index 0000000..737e0e4
--- /dev/null
+++ b/build/tools/ndk-common.sh
@@ -0,0 +1,805 @@
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# A collection of shell function definitions used by various build scripts
+# in the Android NDK (Native Development Kit)
+#
+
+# Get current script name into PROGNAME
+PROGNAME=`basename $0`
+
+if [ -z "$TMPDIR" ]; then
+    export TMPDIR=/tmp/ndk-$USER
+fi
+
+OS=`uname -s`
+if [ "$OS" == "Darwin" -a -z "$MACOSX_DEPLOYMENT_TARGET" ]; then
+    export MACOSX_DEPLOYMENT_TARGET="10.8"
+fi
+
+# Find the Android NDK root, assuming we are invoked from a script
+# within its directory structure.
+#
+# $1: Variable name that will receive the path
+# $2: Path of invoking script
+find_ndk_root ()
+{
+    # Try to auto-detect the NDK root by walking up the directory
+    # path to the current script.
+    local PROGDIR="`dirname \"$2\"`"
+    while [ -n "1" ] ; do
+        if [ -d "$PROGDIR/build/core" ] ; then
+            break
+        fi
+        if [ -z "$PROGDIR" -o "$PROGDIR" = '/' ] ; then
+            return 1
+        fi
+        PROGDIR="`cd \"$PROGDIR/..\" && pwd`"
+    done
+    eval $1="$PROGDIR"
+}
+
+# Put location of Android NDK into ANDROID_NDK_ROOT and
+# perform a tiny amount of sanity check
+#
+if [ -z "$ANDROID_NDK_ROOT" ] ; then
+    find_ndk_root ANDROID_NDK_ROOT "$0"
+    if [ $? != 0 ]; then
+        echo "Please define ANDROID_NDK_ROOT to point to the root of your"
+        echo "Android NDK installation."
+        exit 1
+    fi
+fi
+
+echo "$ANDROID_NDK_ROOT" | grep -q -e " "
+if [ $? = 0 ] ; then
+    echo "ERROR: The Android NDK installation path contains a space !"
+    echo "Please install to a different location."
+    exit 1
+fi
+
+if [ ! -d $ANDROID_NDK_ROOT ] ; then
+    echo "ERROR: Your ANDROID_NDK_ROOT variable does not point to a directory."
+    echo "ANDROID_NDK_ROOT=$ANDROID_NDK_ROOT"
+    exit 1
+fi
+
+if [ ! -f $ANDROID_NDK_ROOT/build/tools/ndk-common.sh ] ; then
+    echo "ERROR: Your ANDROID_NDK_ROOT does not contain a valid NDK build system."
+    echo "ANDROID_NDK_ROOT=$ANDROID_NDK_ROOT"
+    exit 1
+fi
+
+## Use DRYRUN to find out top-level commands.
+DRYRUN=${DRYRUN-no}
+
+## Logging support
+##
+VERBOSE=${VERBOSE-yes}
+
+dump ()
+{
+    echo "$@"
+}
+
+dump_n ()
+{
+    printf %s "$@"
+}
+
+log ()
+{
+    if [ "$VERBOSE" = "yes" ] ; then
+        echo "$@"
+    fi
+}
+
+log_n ()
+{
+    if [ "$VERBOSE" = "yes" ] ; then
+        printf %s "$@"
+    fi
+}
+
+run ()
+{
+    if [ "$DRYRUN" = "yes" ] ; then
+        echo "## SKIP COMMAND: $@"
+    elif [ "$VERBOSE" = "yes" ] ; then
+        echo "## COMMAND: $@"
+        "$@" 2>&1
+    else
+        "$@" > /dev/null 2>&1
+    fi
+}
+
+panic ()
+{
+    dump "ERROR: $@"
+    exit 1
+}
+
+fail_panic ()
+{
+    if [ $? != 0 ] ; then
+        dump "ERROR: $@"
+        exit 1
+    fi
+}
+
+fail_warning ()
+{
+    if [ $? != 0 ] ; then
+        dump "WARNING: $@"
+    fi
+}
+
+
+## Utilities
+##
+
+# Return the value of a given named variable
+# $1: variable name
+#
+# example:
+#    FOO=BAR
+#    BAR=ZOO
+#    echo `var_value $FOO`
+#    will print 'ZOO'
+#
+var_value ()
+{
+    # find a better way to do that ?
+    eval echo "$`echo $1`"
+}
+
+# convert to uppercase
+# assumes tr is installed on the platform ?
+#
+to_uppercase ()
+{
+    echo $1 | tr "[:lower:]" "[:upper:]"
+}
+
+## First, we need to detect the HOST CPU, because proper HOST_ARCH detection
+## requires platform-specific tricks.
+##
+HOST_EXE=""
+HOST_OS=`uname -s`
+HOST_ARCH=x86_64
+case "$HOST_OS" in
+    Darwin)
+        HOST_OS=darwin
+        ;;
+    Linux)
+        # note that building 32-bit binaries on x86_64 is handled later
+        HOST_OS=linux
+        ;;
+    FreeBsd)  # note: this is not tested
+        HOST_OS=freebsd
+        ;;
+    CYGWIN*|*_NT-*)
+        HOST_OS=windows
+        HOST_EXE=.exe
+        HOST_ARCH=`uname -m`
+        if [ "x$OSTYPE" = xcygwin ] ; then
+            HOST_OS=cygwin
+        fi
+        ;;
+esac
+
+log "HOST_OS=$HOST_OS"
+log "HOST_EXE=$HOST_EXE"
+log "HOST_ARCH=$HOST_ARCH"
+
+# at this point, the supported values for HOST_ARCH are:
+#   x86
+#   x86_64
+#
+# other values may be possible but haven't been tested
+#
+# at this point, the value of HOST_OS should be one of the following:
+#   linux
+#   darwin
+#    windows (MSys)
+#    cygwin
+#
+# Note that cygwin is treated as a special case because it behaves very differently
+# for a few things. Other values may be possible but have not been tested
+#
+
+# define HOST_TAG as a unique tag used to identify both the host OS and CPU
+# supported values are:
+#
+#   linux-x86_64
+#   darwin-x86_64
+#   windows
+#   windows-x86_64
+#
+# other values are possible but were not tested.
+#
+compute_host_tag ()
+{
+    HOST_TAG=${HOST_OS}-${HOST_ARCH}
+    # Special case for windows-x86 => windows
+    case $HOST_TAG in
+        windows-x86|cygwin-x86)
+            HOST_TAG="windows"
+            ;;
+        cygwin-x86_64)
+            HOST_TAG="windows-x86_64"
+            ;;
+    esac
+    log "HOST_TAG=$HOST_TAG"
+}
+
+compute_host_tag
+
+# Compute the number of host CPU cores as HOST_NUM_CPUS
+#
+case "$HOST_OS" in
+    linux)
+        HOST_NUM_CPUS=`cat /proc/cpuinfo | grep processor | wc -l`
+        ;;
+    darwin|freebsd)
+        HOST_NUM_CPUS=`sysctl -n hw.ncpu`
+        ;;
+    windows|cygwin)
+        HOST_NUM_CPUS=$NUMBER_OF_PROCESSORS
+        ;;
+    *)  # let's play safe here
+        HOST_NUM_CPUS=1
+esac
+
+log "HOST_NUM_CPUS=$HOST_NUM_CPUS"
+
+# If BUILD_NUM_CPUS is not already defined in your environment,
+# define it as double the HOST_NUM_CPUS. This is used to
+# run Make commands in parallel, as in 'make -j$BUILD_NUM_CPUS'
+#
+if [ -z "$BUILD_NUM_CPUS" ] ; then
+    BUILD_NUM_CPUS=`expr $HOST_NUM_CPUS \* 2`
+fi
+
+log "BUILD_NUM_CPUS=$BUILD_NUM_CPUS"
+
+
+##  HOST TOOLCHAIN SUPPORT
+##
+
+# force the generation of 32-bit binaries on 64-bit systems
+#
+FORCE_32BIT=no
+force_32bit_binaries ()
+{
+    if [ "$HOST_ARCH" = x86_64 ] ; then
+        log "Forcing generation of 32-bit host binaries on $HOST_ARCH"
+        FORCE_32BIT=yes
+        HOST_ARCH=x86
+        log "HOST_ARCH=$HOST_ARCH"
+        compute_host_tag
+    fi
+}
+
+# On Windows, cygwin binaries will be generated by default, but
+# you can force mingw ones that do not link to cygwin.dll if you
+# call this function.
+#
+disable_cygwin ()
+{
+    if [ $HOST_OS = cygwin ] ; then
+        log "Disabling cygwin binaries generation"
+        CFLAGS="$CFLAGS -mno-cygwin"
+        LDFLAGS="$LDFLAGS -mno-cygwin"
+        HOST_OS=windows
+        compute_host_tag
+    fi
+}
+
+# Various probes are going to need to run a small C program
+mkdir -p $TMPDIR/tmp/tests
+
+TMPC=$TMPDIR/tmp/tests/test-$$.c
+TMPO=$TMPDIR/tmp/tests/test-$$.o
+TMPE=$TMPDIR/tmp/tests/test-$$$EXE
+TMPL=$TMPDIR/tmp/tests/test-$$.log
+
+# cleanup temporary files
+clean_temp ()
+{
+    rm -f $TMPC $TMPO $TMPL $TMPE
+}
+
+# cleanup temp files then exit with an error
+clean_exit ()
+{
+    clean_temp
+    exit 1
+}
+
+# this function will setup the compiler and linker and check that they work as advertised
+# note that you should call 'force_32bit_binaries' before this one if you want it to
+# generate 32-bit binaries on 64-bit systems (that support it).
+#
+setup_toolchain ()
+{
+    if [ -z "$CC" ] ; then
+        CC=gcc
+    fi
+    if [ -z "$CXX" ] ; then
+        CXX=g++
+    fi
+    if [ -z "$CXXFLAGS" ] ; then
+        CXXFLAGS="$CFLAGS"
+    fi
+    if [ -z "$LD" ] ; then
+        LD="$CC"
+    fi
+
+    log "Using '$CC' as the C compiler"
+
+    # check that we can compile a trivial C program with this compiler
+    mkdir -p $(dirname "$TMPC")
+    cat > $TMPC <<EOF
+int main(void) {}
+EOF
+
+    if [ "$FORCE_32BIT" = yes ] ; then
+        CC="$CC -m32"
+        CXX="$CXX -m32"
+        LD="$LD -m32"
+        compile
+        if [ $? != 0 ] ; then
+            # sometimes, we need to also tell the assembler to generate 32-bit binaries
+            # this is highly dependent on your GCC installation (and no, we can't set
+            # this flag all the time)
+            CFLAGS="$CFLAGS -Wa,--32"
+            compile
+        fi
+    fi
+
+    compile
+    if [ $? != 0 ] ; then
+        echo "your C compiler doesn't seem to work:"
+        cat $TMPL
+        clean_exit
+    fi
+    log "CC         : compiler check ok ($CC)"
+
+    # check that we can link the trivial program into an executable
+    link
+    if [ $? != 0 ] ; then
+        OLD_LD="$LD"
+        LD="$CC"
+        compile
+        link
+        if [ $? != 0 ] ; then
+            LD="$OLD_LD"
+            echo "your linker doesn't seem to work:"
+            cat $TMPL
+            clean_exit
+        fi
+    fi
+    log "Using '$LD' as the linker"
+    log "LD         : linker check ok ($LD)"
+
+    # check the C++ compiler
+    log "Using '$CXX' as the C++ compiler"
+
+    cat > $TMPC <<EOF
+#include <iostream>
+using namespace std;
+int main()
+{
+  cout << "Hello World!" << endl;
+  return 0;
+}
+EOF
+
+    compile_cpp
+    if [ $? != 0 ] ; then
+        echo "your C++ compiler doesn't seem to work"
+        cat $TMPL
+        clean_exit
+    fi
+
+    log "CXX        : C++ compiler check ok ($CXX)"
+
+    # XXX: TODO perform AR checks
+    AR=ar
+    ARFLAGS=
+}
+
+# try to compile the current source file in $TMPC into an object
+# stores the error log into $TMPL
+#
+compile ()
+{
+    log "Object     : $CC -o $TMPO -c $CFLAGS $TMPC"
+    $CC -o $TMPO -c $CFLAGS $TMPC 2> $TMPL
+}
+
+compile_cpp ()
+{
+    log "Object     : $CXX -o $TMPO -c $CXXFLAGS $TMPC"
+    $CXX -o $TMPO -c $CXXFLAGS $TMPC 2> $TMPL
+}
+
+# try to link the recently built file into an executable. error log in $TMPL
+#
+link()
+{
+    log "Link      : $LD -o $TMPE $TMPO $LDFLAGS"
+    $LD -o $TMPE $TMPO $LDFLAGS 2> $TMPL
+}
+
# run a command, logging it first
#
# $@: command and its arguments
# Returns the command's exit status.
execute()
{
    log "Running: $*"
    # Use "$@" (not the unquoted $*) so arguments that contain spaces
    # or glob characters are passed through to the command unchanged.
    "$@"
}
+
# perform a simple compile / link / run of the source file in $TMPC
#
# Aborts via clean_exit (after printing the source and error log) if the
# compile or link step fails; otherwise runs the binary and returns its
# exit status.
compile_exec_run()
{
    log "RunExec    : $CC -o $TMPE $CFLAGS $TMPC"
    compile
    if [ $? != 0 ] ; then
        echo "Failure to compile test program"
        cat $TMPC
        cat $TMPL
        clean_exit
    fi
    link
    if [ $? != 0 ] ; then
        echo "Failure to link test program"
        cat $TMPC
        echo "------"
        cat $TMPL
        clean_exit
    fi
    $TMPE
}
+
# Succeed (return 0) if extended regex $1 matches string $2.
# $1: ERE pattern
# $2: text to test
pattern_match ()
{
    echo "$2" | grep -q -E -e "$1"
}
+
# Let's check that we have a working md5sum here
# Hashes the string "A" (plus echo's newline) and compares it with the
# known digest; exits with status 2 if md5sum is missing or broken.
check_md5sum ()
{
    A_MD5=`echo "A" | md5sum | cut -d' ' -f1`
    if [ "$A_MD5" != "bf072e9119077b4e76437a93986787ef" ] ; then
        echo "Please install md5sum on this machine"
        exit 2
    fi
}
+
# Find if a given shell program is available.
# We need to take care of the fact that the 'which <foo>' command
# may return either an empty string (Linux) or something like
# "no <foo> in ..." (Darwin). Also, we need to redirect stderr
# to /dev/null for Cygwin
#
# $1: variable name
# $2: program name
#
# Result: set $1 to the full path of the corresponding command
#         or to the empty/undefined string if not available
#         Also returns 'which's exit status (0 when found).
#
find_program ()
{
    local PROG RET
    PROG=`which $2 2>/dev/null`
    RET=$?
    if [ $RET != 0 ]; then
        PROG=
    fi
    eval $1=\"$PROG\"
    return $RET
}
+
# Locate the download tools (wget, curl, scp) and record their paths in
# CMD_WGET, CMD_CURL and CMD_SCP for later use by download_file.
prepare_download ()
{
    find_program CMD_WGET wget
    find_program CMD_CURL curl
    # Bug fix: this used to store into CMD_SCRP, but download_file tests
    # $CMD_SCP, so the scp download path could never be taken.
    find_program CMD_SCP scp
}
+
# Probe for pbzip2 (parallel bzip2) once and cache the result in $PBZIP2;
# _PBZIP2_initialized guards against repeating the lookup.
find_pbzip2 ()
{
    if [ -z "$_PBZIP2_initialized" ] ; then
        find_program PBZIP2 pbzip2
        _PBZIP2_initialized="yes"
    fi
}
+
# Download a file with either 'curl', 'wget' or 'scp'
#
# $1: source URL (e.g. http://foo.com, ssh://blah, /some/path)
# $2: target file
#
# Silently does nothing when $1 matches none of the recognized schemes.
download_file ()
{
    # Is this HTTP, HTTPS or FTP ?
    if pattern_match "^(http|https|ftp):.*" "$1"; then
        if [ -n "$CMD_WGET" ] ; then
            run $CMD_WGET -O $2 $1
        elif [ -n "$CMD_CURL" ] ; then
            run $CMD_CURL -o $2 $1
        else
            echo "Please install wget or curl on this machine"
            exit 1
        fi
        return
    fi

    # Is this SSH ?
    # Accept both ssh://<path> or <machine>:<path>
    #
    # NOTE(review): this branch requires CMD_SCP to be set, but
    # prepare_download stores the scp path in CMD_SCRP -- verify and
    # align the two variable names.
    if pattern_match "^(ssh|[^:]+):.*" "$1"; then
        if [ -n "$CMD_SCP" ] ; then
            scp_src=`echo $1 | sed -e s%ssh://%%g`
            run $CMD_SCP $scp_src $2
        else
            echo "Please install scp on this machine"
            exit 1
        fi
        return
    fi

    # Is this a file copy ?
    # Accept both file://<path> or /<path>
    #
    if pattern_match "^(file://|/).*" "$1"; then
        cp_src=`echo $1 | sed -e s%^file://%%g`
        run cp -f $cp_src $2
        return
    fi
}
+
# Form the relative path between from one abs path to another
#
# $1 : start path
# $2 : end path
#
# With a single argument, computes the path of $1 relative to /.
#
# From:
# http://stackoverflow.com/questions/2564634/bash-convert-absolute-path-into-relative-path-given-a-current-directory
relpath ()
{
    [ $# -ge 1 ] && [ $# -le 2 ] || return 1
    current="${2:+"$1"}"
    target="${2:-"$1"}"
    [ "$target" != . ] || target=/
    target="/${target##/}"
    [ "$current" != . ] || current=/
    current="${current:="/"}"
    current="/${current##/}"
    appendix="${target##/}"
    relative=''
    # Walk up from $current one component at a time, accumulating ".."
    # until $current becomes a prefix of $target; $appendix then holds
    # the remaining target suffix.
    while appendix="${target#"$current"/}"
        [ "$current" != '/' ] && [ "$appendix" = "$target" ]; do
        if [ "$current" = "$appendix" ]; then
            relative="${relative:-.}"
            echo "${relative#/}"
            return 0
        fi
        current="${current%/*}"
        relative="$relative${relative:+/}.."
    done
    relative="$relative${relative:+${appendix:+/}}${appendix#/}"
    echo "$relative"
}
+
# Pack a given archive
#
# $1: archive file path (including extension)
# $2: source directory for archive content
# $3+: list of files (including patterns), all if empty
#
# Supports .zip and .tar.bz2 (using pbzip2 when available); panics on
# any other extension. A bare filename for $1 is resolved against $PWD.
pack_archive ()
{
    local ARCHIVE="$1"
    local SRCDIR="$2"
    local SRCFILES
    local TARFLAGS ZIPFLAGS
    shift; shift;
    if [ -z "$1" ] ; then
        SRCFILES="*"
    else
        SRCFILES="$@"
    fi
    if [ "`basename $ARCHIVE`" = "$ARCHIVE" ] ; then
        ARCHIVE="`pwd`/$ARCHIVE"
    fi
    mkdir -p `dirname $ARCHIVE`

    TARFLAGS="--exclude='*.py[cod]' --exclude='*.swp' --exclude=.git --exclude=.gitignore -cf"
    ZIPFLAGS="-x *.git* -x *.pyc -x *.pyo -0qr"
    # Ensure symlinks are stored as is in zip files. for toolchains
    # this can save up to 7 MB in the size of the final archive
    #ZIPFLAGS="$ZIPFLAGS --symlinks"
    case "$ARCHIVE" in
        *.zip)
            rm -f $ARCHIVE
            (cd $SRCDIR && run zip $ZIPFLAGS "$ARCHIVE" $SRCFILES)
            ;;
        *.tar.bz2)
            find_pbzip2
            if [ -n "$PBZIP2" ] ; then
                (cd $SRCDIR && run tar --use-compress-prog=pbzip2 $TARFLAGS "$ARCHIVE" $SRCFILES)
            else
                (cd $SRCDIR && run tar -j $TARFLAGS "$ARCHIVE" $SRCFILES)
            fi
            ;;
        *)
            panic "Unsupported archive format: $ARCHIVE"
            ;;
    esac
}
+
# Copy a directory, create target location if needed
#
# $1: source directory
# $2: target directory location
#
# Uses a tar pipe so permissions are preserved; panics on failure.
copy_directory ()
{
    local SRCDIR="$1"
    local DSTDIR="$2"
    if [ ! -d "$SRCDIR" ] ; then
        panic "Can't copy from non-directory: $SRCDIR"
    fi
    log "Copying directory: "
    log "  from $SRCDIR"
    log "  to $DSTDIR"
    mkdir -p "$DSTDIR" && (cd "$SRCDIR" && 2>/dev/null tar cf - *) | (tar xf - -C "$DSTDIR")
    fail_panic "Cannot copy to directory: $DSTDIR"
}
+
# Move a directory, create target location if needed
#
# $1: source directory
# $2: target directory location
#
# Moves the *contents* of $1 into $2 (the $1 directory itself remains,
# empty); panics on failure.
move_directory ()
{
    local SRCDIR="$1"
    local DSTDIR="$2"
    if [ ! -d "$SRCDIR" ] ; then
        panic "Can't move from non-directory: $SRCDIR"
    fi
    log "Move directory: "
    log "  from $SRCDIR"
    log "  to $DSTDIR"
    mkdir -p "$DSTDIR" && (mv "$SRCDIR"/* "$DSTDIR")
    fail_panic "Cannot move to directory: $DSTDIR"
}
+
# This is the same as copy_directory(), but symlinks will be replaced
# by the file they actually point to instead (tar 'h' flag).
copy_directory_nolinks ()
{
    local SRCDIR="$1"
    local DSTDIR="$2"
    if [ ! -d "$SRCDIR" ] ; then
        panic "Can't copy from non-directory: $SRCDIR"
    fi
    log "Copying directory (without symlinks): "
    log "  from $SRCDIR"
    log "  to $DSTDIR"
    mkdir -p "$DSTDIR" && (cd "$SRCDIR" && tar chf - *) | (tar xf - -C "$DSTDIR")
    fail_panic "Cannot copy to directory: $DSTDIR"
}
+
# Copy certain files from one directory to another one
# $1: source directory
# $2: target directory
# $3+: file list (including patterns)
#
# Creates $2 if needed; panics if $1 is not a directory or the copy fails.
copy_file_list ()
{
    local SRCDIR="$1"
    local DSTDIR="$2"
    shift; shift;
    if [ ! -d "$SRCDIR" ] ; then
        # Fixed typo in the user-facing message ("Cant'" -> "Can't"),
        # matching the wording used by copy_directory().
        panic "Can't copy from non-directory: $SRCDIR"
    fi
    log "Copying file: $@"
    log "  from $SRCDIR"
    log "  to $DSTDIR"
    mkdir -p "$DSTDIR" && (cd "$SRCDIR" && (echo $@ | tr ' ' '\n' | tar hcf - -T -)) | (tar xf - -C "$DSTDIR")
    fail_panic "Cannot copy files to directory: $DSTDIR"
}
+
# Rotate a log file
# If the given log file exist, add a -1 to the end of the file.
# If older log files exist, rename them to -<n+1>
# $1: log file
# $2: maximum version to retain [optional]
rotate_log ()
{
    # Default Maximum versions to retain
    local MAXVER="5"
    local LOGFILE="$1"
    shift;
    if [ ! -z "$1" ] ; then
        local tmpmax="$1"
        shift;
        tmpmax=`expr $tmpmax + 0`
        if [ $tmpmax -lt 1 ] ; then
            # The previous message had a doubled "invalid" and claimed we
            # would default to $MAXVER, but panic aborts the script, so no
            # defaulting ever happens.
            panic "Invalid maximum log file versions: '$tmpmax'"
        else
            MAXVER=$tmpmax;
        fi
    fi

    # Do Nothing if the log file does not exist
    if [ ! -f "${LOGFILE}" ] ; then
        return
    fi

    # Rename existing older versions, oldest first so nothing is clobbered
    ver=$MAXVER
    while [ $ver -ge 1 ]
    do
        local prev=$(( $ver - 1 ))
        local old="-$prev"

        # Instead of old version 0; use the original filename
        if [ $ver -eq 1 ] ; then
            old=""
        fi

        if [ -f "${LOGFILE}${old}" ] ; then
            mv -f "${LOGFILE}${old}" "${LOGFILE}-${ver}"
        fi

        ver=$prev
    done
}
+
# Dereference symlink
# Replace every symlink under the given directories by a copy of the
# file it ultimately points to, iterating until no symlinks remain.
# $1+: directories
dereference_symlink ()
{
    local DIRECTORY SYMLINKS DIR FILE LINK
    for DIRECTORY in "$@"; do
        if [ -d "$DIRECTORY" ]; then
            while true; do
                # Find all symlinks in this directory.
                SYMLINKS=`find $DIRECTORY -type l`
                if [ -z "$SYMLINKS" ]; then
                    break;
                fi
                # Iterate symlinks
                for SYMLINK in $SYMLINKS; do
                    if [ -L "$SYMLINK" ]; then
                        DIR=`dirname "$SYMLINK"`
                        FILE=`basename "$SYMLINK"`
                        # Note that if `readlink $FILE` is also a link, we want to deal
                        # with it in the next iteration.  There is a potential
                        # infinite-loop situation here, but circular links don't
                        # exist in our case.
                        (cd "$DIR" && \
                         LINK=`readlink "$FILE"` && \
                         test ! -L "$LINK" && \
                         rm -f "$FILE" && \
                         cp -a "$LINK" "$FILE")
                    fi
                done
            done
        fi
    done
}
diff --git a/build/tools/ndk_bin_common.sh b/build/tools/ndk_bin_common.sh
deleted file mode 100644
index 2d08a4c..0000000
--- a/build/tools/ndk_bin_common.sh
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-HOST_OS=$(uname -s)
-case $HOST_OS in
-  Darwin) HOST_OS=darwin;;
-  Linux) HOST_OS=linux;;
-  FreeBsd) HOST_OS=freebsd;;
-  CYGWIN*|*_NT-*) HOST_OS=cygwin;;
-  *) echo "ERROR: Unknown host operating system: $HOST_OS"
-     exit 1
-esac
-
-HOST_ARCH=$(uname -m)
-case $HOST_ARCH in
-  arm64) HOST_ARCH=arm64;;
-  i?86) HOST_ARCH=x86;;
-  x86_64|amd64) HOST_ARCH=x86_64;;
-  *) echo "ERROR: Unknown host CPU architecture: $HOST_ARCH"
-     exit 1
-esac
-
-HOST_TAG=$HOST_OS-$HOST_ARCH
-
-if [ $HOST_TAG = darwin-arm64 ]; then
-  # The NDK ships universal arm64+x86_64 binaries in the darwin-x86_64
-  # directory.
-  HOST_TAG=darwin-x86_64
-fi
-
-if [ $HOST_OS = cygwin ]; then
-  ANDROID_NDK_PYTHON=$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/windows-x86_64/python3/python.exe
-else
-  ANDROID_NDK_PYTHON=$ANDROID_NDK_ROOT/toolchains/llvm/prebuilt/$HOST_TAG/python3/bin/python3
-fi
diff --git a/build/tools/prebuilt-common.sh b/build/tools/prebuilt-common.sh
new file mode 100644
index 0000000..3a0e095
--- /dev/null
+++ b/build/tools/prebuilt-common.sh
@@ -0,0 +1,1456 @@
# Common functions for all prebuilt-related scripts
# This is included/sourced by other scripts
#

# ensure stable sort order
export LC_ALL=C

# NDK_BUILDTOOLS_PATH should point to the directory containing
# this script. If it is not defined, assume that this is one of
# the scripts in the same directory that sourced this file.
#
if [ -z "$NDK_BUILDTOOLS_PATH" ]; then
    NDK_BUILDTOOLS_PATH=$(dirname $0)
    if [ ! -f "$NDK_BUILDTOOLS_PATH/prebuilt-common.sh" ]; then
        echo "INTERNAL ERROR: Please define NDK_BUILDTOOLS_PATH to point to \$NDK/build/tools"
        exit 1
    fi
fi

# Warn if /bin/sh isn't bash.
if [ -z "$BASH_VERSION" ] ; then
    echo "WARNING: The shell running this script isn't bash.  Although we try to avoid bashism in scripts, things can happen."
fi

NDK_BUILDTOOLS_ABSPATH=$(cd $NDK_BUILDTOOLS_PATH && pwd)

# Pull in shared helpers (log, run, panic, var_value, ... -- presumably
# defined in ndk-common.sh) and default toolchain/platform settings.
. $NDK_BUILDTOOLS_PATH/ndk-common.sh
. $NDK_BUILDTOOLS_PATH/dev-defaults.sh
+
# Given an input string of the form <foo>-<bar>-<version>, where
# <version> can be <major>.<minor>, extract <version> -- i.e. everything
# after the last dash (the whole string when there is no dash).
extract_version ()
{
    echo "${1##*-}"
}
+
# $1: versioned name (e.g. arm-linux-androideabi-4.8)
# Out: major version (e.g. 4)
#
# Examples:  arm-linux-androideabi-4.4.3 -> 4
#            gmp-0.81 -> 0
#
# Defaults to 0 when no version component can be extracted.
extract_major_version ()
{
    local RET=$(extract_version $1 | cut -d . -f 1)
    RET=${RET:-0}
    echo $RET
}
+
# Same as extract_major_version, but for the minor version number
# $1: versioned named
# Out: minor version (0 when there is no minor component)
#
extract_minor_version ()
{
    local RET=$(extract_version $1 | cut -d . -f 2)
    RET=${RET:-0}
    echo $RET
}
+
# Compare two version numbers and only succeeds if the first one is
# greater than or equal to the second one.
#
# $1: first version (e.g. 4.9)
# $2: second version (e.g. 4.8)
#
# Example: version_is_at_least 4.9 4.8 --> success
#
# Note: only major and minor are compared; any patch component is ignored.
version_is_at_least ()
{
    local A_MAJOR A_MINOR B_MAJOR B_MINOR
    A_MAJOR=$(extract_major_version $1)
    B_MAJOR=$(extract_major_version $2)

    if [ $A_MAJOR -lt $B_MAJOR ]; then
        return 1
    elif [ $A_MAJOR -gt $B_MAJOR ]; then
        return 0
    fi

    # We have A_MAJOR == B_MAJOR here

    A_MINOR=$(extract_minor_version $1)
    B_MINOR=$(extract_minor_version $2)

    if [ $A_MINOR -lt $B_MINOR ]; then
        return 1
    else
        return 0
    fi
}
+
+#====================================================
+#
+#  UTILITY FUNCTIONS
+#
+#====================================================
+
# Return the maximum length of a series of strings
#
# Usage:  len=`max_length <string1> <string2> ...`
#
# Splits the arguments on spaces and prints the length of the longest
# word (0 for empty input).
max_length ()
{
    echo "$@" | tr ' ' '\n' | awk 'BEGIN {max=0} {len=length($1); if (len > max) max=len} END {print max}'
}
+
# Translate dashes to underscores
# Usage:  str=`dashes_to_underscores <values>`
dashes_to_underscores ()
{
    echo "$@" | sed -e 's/-/_/g'
}
+
# Translate underscores to dashes
# Usage: str=`underscores_to_dashes <values>`
underscores_to_dashes ()
{
    echo "$@" | sed -e 's/_/-/g'
}
+
# Translate commas to spaces
# Usage: str=`commas_to_spaces <list>`
commas_to_spaces ()
{
    echo "$@" | sed -e 's/,/ /g'
}
+
# Translate spaces to commas
# Usage: list=`spaces_to_commas <string>`
spaces_to_commas ()
{
    echo "$@" | sed -e 's/ /,/g'
}
+
# Remove a trailing slash from a path
# $1: path
# Note: ${1%%/} strips at most one trailing '/'.
remove_trailing_slash () {
    echo ${1%%/}
}
+
# Reverse a file path directory
# foo -> .
# foo/bar -> ..
# foo/bar/zoo -> ../..
#
# I.e. print the relative path that leads from $1 back to the directory
# it was resolved from ("." for the current directory itself).
reverse_path ()
{
    local path cur item
    path=${1%%/} # remove trailing slash
    cur="."
    if [ "$path" != "." ] ; then
        # Emit one ".." per path component.
        for item in $(echo "$path" | tr '/' ' '); do
            cur="../$cur"
        done
    fi
    echo ${cur%%/.}
}
+
+# test_reverse_path ()
+# {
+#     rr=`reverse_path $1`
+#     if [ "$rr" != "$2" ] ; then
+#         echo "ERROR: reverse_path '$1' -> '$rr' (expected '$2')"
+#     fi
+# }
+#
+# test_reverse_path . .
+# test_reverse_path ./ .
+# test_reverse_path foo ..
+# test_reverse_path foo/ ..
+# test_reverse_path foo/bar ../..
+# test_reverse_path foo/bar/ ../..
+# test_reverse_path foo/bar/zoo ../../..
+# test_reverse_path foo/bar/zoo/ ../../..
+
# Sort a space-separated list and remove duplicates
# $1+: slist
# Output: new slist (the unquoted echo joins the sorted lines back
#         into a single space-separated line)
sort_uniq ()
{
    local RET
    RET=$(echo "$@" | tr ' ' '\n' | sort -u)
    echo $RET
}
+
# Return the list of all regular files under a given directory
# $1: Directory path
# Output: list of files, relative to $1 (one per line, sorted);
#         empty output when $1 is not a directory
list_files_under ()
{
    if [ -d "$1" ]; then
        (cd $1 && find . -type f | sed -e "s!./!!" | sort -u)
    else
        echo ""
    fi
}
+
# Returns all words in text that do not match any of the pattern
# $1: pattern
# $2: text
#
# Each word of $1 is removed from $2; forward slashes in the word are
# escaped so they survive being spliced into the sed expression.
filter_out ()
{
    local PATTERN="$1"
    local TEXT="$2"
    for pat in $PATTERN; do
        pat=$"${pat//\//\\/}"
        TEXT=$(echo $TEXT | sed -e 's/'$pat' //g' -e 's/'$pat'$//g')
    done
    echo $TEXT
}
+
# Assign a value to a variable
# $1: Variable name
# $2: Value
# Equivalent to <name>=<value>, with the name chosen at runtime.
var_assign ()
{
    eval $1=\"$2\"
}
+
+#====================================================
+#
+#  OPTION PROCESSING
+#
+#====================================================
+
+# We recognize the following option formats:
+#
+#  -f
+#  --flag
+#
+#  -s<value>
+#  --setting=<value>
+#
+
+# NOTE: We translate '-' into '_' when storing the options in global variables
+#
+
# Registry state for the option machinery below; OPTIONS accumulates the
# (underscored) names of every registered option.
OPTIONS=""
OPTION_FLAGS=""
OPTION_SETTINGS=""
+
# Set a given option attribute
# $1: option name
# $2: option attribute
# $3: attribute value
#
# Stored in a dynamically-named global: OPTIONS_<name>_<attr>.
option_set_attr ()
{
    eval OPTIONS_$1_$2=\"$3\"
}
+
# Get a given option attribute
# $1: option name
# $2: option attribute
#
# Reads the OPTIONS_<name>_<attr> global via var_value (ndk-common.sh).
option_get_attr ()
{
    echo `var_value OPTIONS_$1_$2`
}
+
# Register a new option
# $1: option
# $2: small abstract for the option
# $3: optional. default value
#
# Classifies $1 as a long/short flag or setting and records its
# attributes. The variables optlabel/optname/optvalue/opttype are
# deliberately NOT local here: callers (register_option,
# register_var_option) declare them local and read optname afterwards.
register_option_internal ()
{
    optlabel=
    optname=
    optvalue=
    opttype=
    # 'while true' used as a one-shot block so each match can 'break' out.
    while [ -n "1" ] ; do
        # Check for something like --setting=<value>
        echo "$1" | grep -q -E -e '^--[^=]+=<.+>$'
        if [ $? = 0 ] ; then
            optlabel=`expr -- "$1" : '\(--[^=]*\)=.*'`
            optvalue=`expr -- "$1" : '--[^=]*=\(<.*>\)'`
            opttype="long_setting"
            break
        fi

        # Check for something like --flag
        echo "$1" | grep -q -E -e '^--[^=]+$'
        if [ $? = 0 ] ; then
            optlabel="$1"
            opttype="long_flag"
            break
        fi

        # Check for something like -f<value>
        echo "$1" | grep -q -E -e '^-[A-Za-z0-9]<.+>$'
        if [ $? = 0 ] ; then
            optlabel=`expr -- "$1" : '\(-.\).*'`
            optvalue=`expr -- "$1" : '-.\(<.+>\)'`
            opttype="short_setting"
            break
        fi

        # Check for something like -f
        echo "$1" | grep -q -E -e '^-.$'
        if [ $? = 0 ] ; then
            optlabel="$1"
            opttype="short_flag"
            break
        fi

        echo "ERROR: Invalid option format: $1"
        echo "       Check register_option call"
        exit 1
    done

    log "new option: type='$opttype' name='$optlabel' value='$optvalue'"

    optname=`dashes_to_underscores $optlabel`
    OPTIONS="$OPTIONS $optname"
    OPTIONS_TEXT="$OPTIONS_TEXT $1"
    option_set_attr $optname label "$optlabel"
    option_set_attr $optname otype "$opttype"
    option_set_attr $optname value "$optvalue"
    option_set_attr $optname text "$1"
    option_set_attr $optname abstract "$2"
    option_set_attr $optname default "$3"
}
+
# Register a new option with a function callback.
#
# $1: option
# $2: name of function that will be called when the option is parsed
# $3: small abstract for the option
# $4: optional. default value
#
# The 'local optname ...' declarations capture the values that
# register_option_internal assigns (it does not declare them local).
register_option ()
{
    local optname optvalue opttype optlabel
    register_option_internal "$1" "$3" "$4"
    option_set_attr $optname funcname "$2"
}
+
# Register a new option with a variable store
#
# $1: option
# $2: name of variable that will be set by this option
# $3: small abstract for the option
#
# NOTE: The current value of $2 is used as the default
#
register_var_option ()
{
    local optname optvalue opttype optlabel
    register_option_internal "$1" "$3" "`var_value $2`"
    option_set_attr $optname varname "$2"
}
+
+
# --mingw and --darwin select mutually exclusive cross-compilation
# targets; each callback rejects the other's flag.
MINGW=no
DARWIN=no
do_mingw_option ()
{
    if [ "$DARWIN" = "yes" ]; then
        echo "Can not have both --mingw and --darwin"
        exit 1
    fi
    MINGW=yes;
}
do_darwin_option ()
{
    if [ "$MINGW" = "yes" ]; then
        echo "Can not have both --mingw and --darwin"
        exit 1
    fi
    DARWIN=yes;
}
+
# Register the --mingw/--darwin cross-compile options.
# Only meaningful on Linux hosts; a no-op elsewhere.
register_canadian_option ()
{
    if [ "$HOST_OS" = "linux" ] ; then
        register_option "--mingw" do_mingw_option "Generate windows binaries on Linux."
        register_option "--darwin" do_darwin_option "Generate darwin binaries on Linux."
    fi
}
+
# --try-64: generate 64-bit host binaries instead of the default.
TRY64=no
do_try64_option () { TRY64=yes; }

register_try64_option ()
{
    register_option "--try-64" do_try64_option "Generate 64-bit only binaries."
}
+
+
# Register -j<number> for parallel builds. The default job count comes
# from BUILD_NUM_CPUS (presumably set by ndk-common.sh -- verify).
register_jobs_option ()
{
    NUM_JOBS=$BUILD_NUM_CPUS
    register_var_option "-j<number>" NUM_JOBS "Use <number> parallel build jobs"
}
+
# Print the help, including a list of registered options for this program
# Note: Assumes PROGRAM_PARAMETERS and PROGRAM_DESCRIPTION exist and
#       correspond to the parameters list and the program description
#
print_help ()
{
    local opt text abstract default

    echo "Usage: $PROGNAME [options] $PROGRAM_PARAMETERS"
    echo ""
    if [ -n "$PROGRAM_DESCRIPTION" ] ; then
        echo "$PROGRAM_DESCRIPTION"
        echo ""
    fi
    echo "Valid options (defaults are in brackets):"
    echo ""

    # Pad every option label to the width of the longest one so the
    # abstracts line up in a column.
    maxw=`max_length "$OPTIONS_TEXT"`
    AWK_SCRIPT=`echo "{ printf \"%-${maxw}s\", \\$1 }"`
    for opt in $OPTIONS; do
        text=`option_get_attr $opt text | awk "$AWK_SCRIPT"`
        abstract=`option_get_attr $opt abstract`
        default=`option_get_attr $opt default`
        if [ -n "$default" ] ; then
            echo "  $text     $abstract [$default]"
        else
            echo "  $text     $abstract"
        fi
    done
    echo ""
}
+
# Abort: option $1 was given an argument but takes none.
option_panic_no_args ()
{
    echo "ERROR: Option '$1' does not take arguments. See --help for usage."
    exit 1
}
+
# Abort: option $1 requires an argument that was not supplied.
option_panic_missing_arg ()
{
    echo "ERROR: Option '$1' requires an argument. See --help for usage."
    exit 1
}
+
# Parse the program's command line against the registered options.
#
# $@: command-line arguments
#
# Non-option arguments are accumulated, space-separated, in PARAMETERS.
# For each recognized option, either its bound variable (varname) is set
# or its callback (funcname) is invoked with the option's value.
# Unknown options and malformed usage abort with an error.
extract_parameters ()
{
    local opt optname otype value name fin funcname
    PARAMETERS=""
    while [ -n "$1" ] ; do
        # If the parameter does not begin with a dash
        # it is not an option.
        param=`expr -- "$1" : '^\([^\-].*\)$'`
        if [ -n "$param" ] ; then
            if [ -z "$PARAMETERS" ] ; then
                PARAMETERS="$1"
            else
                PARAMETERS="$PARAMETERS $1"
            fi
            shift
            continue
        fi

        # One-shot block: classify $1 and 'break' out on the first match.
        while [ -n "1" ] ; do
            # Try to match a long setting, i.e. --option=value
            opt=`expr -- "$1" : '^\(--[^=]*\)=.*$'`
            if [ -n "$opt" ] ; then
                otype="long_setting"
                value=`expr -- "$1" : '^--[^=]*=\(.*\)$'`
                break
            fi

            # Try to match a long flag, i.e. --option
            opt=`expr -- "$1" : '^\(--.*\)$'`
            if [ -n "$opt" ] ; then
                otype="long_flag"
                value="yes"
                break
            fi

            # Try to match a short setting, i.e. -o<value>
            opt=`expr -- "$1" : '^\(-[A-Za-z0-9]\)..*$'`
            if [ -n "$opt" ] ; then
                otype="short_setting"
                value=`expr -- "$1" : '^-.\(.*\)$'`
                break
            fi

            # Try to match a short flag, i.e. -o
            opt=`expr -- "$1" : '^\(-.\)$'`
            if [ -n "$opt" ] ; then
                otype="short_flag"
                value="yes"
                break
            fi

            echo "ERROR: Unknown option '$1'. Use --help for list of valid values."
            exit 1
        done

        #echo "Found opt='$opt' otype='$otype' value='$value'"

        name=`dashes_to_underscores $opt`
        found=0
        for xopt in $OPTIONS; do
            if [ "$name" != "$xopt" ] ; then
                continue
            fi
            # Check that the type is correct here
            #
            # This also allows us to handle -o <value> as -o<value>
            #
            xotype=`option_get_attr $name otype`
            if [ "$otype" != "$xotype" ] ; then
                case "$xotype" in
                "short_flag")
                    option_panic_no_args $opt
                    ;;
                "short_setting")
                    if [ -z "$2" ] ; then
                        option_panic_missing_arg $opt
                    fi
                    value="$2"
                    shift
                    ;;
                "long_flag")
                    option_panic_no_args $opt
                    ;;
                "long_setting")
                    option_panic_missing_arg $opt
                    ;;
                esac
            fi
            found=1
            # (a duplicated, unreachable 'break' was removed here)
            break
        done
        if [ "$found" = "0" ] ; then
            echo "ERROR: Unknown option '$opt'. See --help for usage."
            exit 1
        fi
        # Set variable or launch option-specific function.
        varname=`option_get_attr $name varname`
        if [ -n "$varname" ] ; then
            eval ${varname}=\"$value\"
        else
            eval `option_get_attr $name funcname` \"$value\"
        fi
        shift
    done
}
+
# --help callback: print usage and exit successfully.
do_option_help ()
{
    print_help
    exit 0
}
+
# --verbose callback (VERBOSE is presumably consulted by log in
# ndk-common.sh -- verify).
VERBOSE=no
do_option_verbose ()
{
    VERBOSE=yes
}

# --dryrun callback (DRYRUN is presumably consulted by run -- verify).
DRYRUN=no
do_option_dryrun ()
{
    DRYRUN=yes
}
+
# Options common to every script that sources this file.
register_option "--help"          do_option_help     "Print this help."
register_option "--verbose"       do_option_verbose  "Enable verbose mode."
register_option "--dryrun"        do_option_dryrun   "Set to dryrun mode."
+
+#====================================================
+#
+#  TOOLCHAIN AND ABI PROCESSING
+#
+#====================================================
+
# Determine optional variable value
# $1: final variable name
# $2: option variable name
# $3: small description for the option
#
# Sets $1 from $2 when $2 is non-empty; otherwise keeps $1's current value.
fix_option ()
{
    if [ -n "$2" ] ; then
        eval $1="$2"
        log "Using specific $3: $2"
    else
        log "Using default $3: `var_value $1`"
    fi
}
+
+
# If SYSROOT is empty, check that $1/$2 contains a sysroot
# and set the variable to it.
#
# $1: sysroot path
# $2: platform/arch suffix
#
# No-op once SYSROOT has been set by a previous call.
check_sysroot ()
{
    if [ -z "$SYSROOT" ] ; then
        log "Probing directory for sysroot: $1/$2"
        if [ -d $1/$2 ] ; then
            SYSROOT=$1/$2
        fi
    fi
}
+
# Determine sysroot
# $1: Option value (or empty)
#
# When $1 is empty, probes the known sysroot locations (in priority
# order) for $PLATFORM/arch-$ARCH. The result is validated by checking
# for usr/include/stdlib.h; exits on failure.
fix_sysroot ()
{
    if [ -n "$1" ] ; then
        eval SYSROOT="$1"
        log "Using specified sysroot: $1"
    else
        SYSROOT_SUFFIX=$PLATFORM/arch-$ARCH
        SYSROOT=
        check_sysroot $ANDROID_BUILD_TOP/prebuilts/ndk/current/platforms $SYSROOT_SUFFIX
        check_sysroot $ANDROID_NDK_ROOT/platforms $SYSROOT_SUFFIX
        check_sysroot `dirname $ANDROID_NDK_ROOT`/development/ndk/platforms $SYSROOT_SUFFIX

        if [ -z "$SYSROOT" ] ; then
            echo "ERROR: Could not find NDK sysroot path for $SYSROOT_SUFFIX."
            echo "       Use --sysroot=<path> to specify one."
            exit 1
        fi
    fi

    if [ ! -f $SYSROOT/usr/include/stdlib.h ] ; then
        echo "ERROR: Invalid sysroot path: $SYSROOT"
        echo "       Use --sysroot=<path> to indicate a valid one."
        exit 1
    fi
}
+
# Check for the availability of a compatibility SDK in Darwin
# this can be used to generate binaries compatible with either Tiger or
# Leopard.
#
# $1: SDK root path
# $2: Optional MacOS X minimum version (e.g. 10.5)
#
# On success, appends -isysroot/-mmacosx-version-min flags to
# HOST_CFLAGS/HOST_LDFLAGS, records the version in DARWIN_MINVER and
# returns 0; returns 1 when the SDK directory does not exist.
DARWIN_MINVER=10.6
check_darwin_sdk ()
{
    local MACSDK="$1"
    local MINVER=$2

    if [ -z "$MINVER" ] ; then
        # expect SDK root path ended up with either MacOSX##.#.sdk or MacOSX##.#u.sdk
        MINVER=${MACSDK##*MacOSX}
        MINVER=${MINVER%%.sdk*}
        if [ "$MINVER" = "10.4u" ]; then
            MINVER=10.4
        fi
    fi
    if [ -d "$MACSDK" ] ; then
        # NOTE(review): '-DMAXOSX_DEPLOYEMENT_TARGET' looks misspelled
        # (cf. MACOSX_DEPLOYMENT_TARGET); confirm nothing keys off the
        # misspelling before renaming it.
        HOST_CFLAGS=$HOST_CFLAGS" -isysroot $MACSDK -mmacosx-version-min=$MINVER -DMAXOSX_DEPLOYEMENT_TARGET=$MINVER"
        HOST_LDFLAGS=$HOST_LDFLAGS" -Wl,-syslibroot,$MACSDK -mmacosx-version-min=$MINVER"
        DARWIN_MINVER=$MINVER
        return 0  # success
    fi
    return 1
}
+
# Probe Darwin SDK in specified directory $DARWIN_SYSROOT, or
# /Developer/SDKs/MacOSX10.6.sdk
#
# Exits with an error when $DARWIN_SYSROOT is set but invalid.
probe_darwin_sdk ()
{
    if [ -n "$DARWIN_SYSROOT" ]; then
        if check_darwin_sdk "$DARWIN_SYSROOT"; then
            log "Use darwin sysroot $DARWIN_SYSROOT"
        else
            echo "darwin sysroot $DARWIN_SYSROOT is not valid"
            exit 1
        fi
    elif check_darwin_sdk /Developer/SDKs/MacOSX10.6.sdk 10.6; then
        log "Generating Snow Leopard-compatible binaries!"
    else
        local version=`sw_vers -productVersion`
        log "Generating $version-compatible binaries!"
    fi
}
+
# Adjust HOST_OS/HOST_TAG/ABI_CONFIGURE_HOST/HOST_EXE for a canadian
# cross build when --mingw or --darwin was given (Linux hosts only;
# aborts elsewhere). TRY64 selects the 64-bit host triple.
handle_canadian_build ()
{
    HOST_EXE=
    if [ "$MINGW" = "yes" -o "$DARWIN" = "yes" ] ; then
        case $HOST_TAG in
            linux-*)
                ;;
            *)
                echo "ERROR: Can only enable --mingw or --darwin on Linux platforms !"
                exit 1
                ;;
        esac
        if [ "$MINGW" = "yes" ] ; then
            if [ "$TRY64" = "yes" ]; then
                ABI_CONFIGURE_HOST=x86_64-w64-mingw32
                HOST_TAG=windows-x86_64
            else
                # NOTE: A wrapper is generated for i686-w64-mingw32.
                ABI_CONFIGURE_HOST=i686-w64-mingw32
                HOST_TAG=windows
            fi
            HOST_OS=windows
            HOST_EXE=.exe
        else
            if [ "$TRY64" = "yes" ]; then
                ABI_CONFIGURE_HOST=x86_64-apple-darwin
                HOST_TAG=darwin-x86_64
            else
                ABI_CONFIGURE_HOST=i686-apple-darwin
                HOST_TAG=darwin-x86
            fi
            HOST_OS=darwin
        fi
    fi
}
+
# Find mingw toolchain
#
# Set MINGW_GCC to the found mingw toolchain
# Also sets BINPREFIX and DEBIAN_NAME; panics when the prebuilt gcc
# is missing.
find_mingw_toolchain ()
{
    local LINUX_GCC_PREBUILTS=$ANDROID_BUILD_TOP/prebuilts/gcc/linux-x86
    local MINGW_ROOT=$LINUX_GCC_PREBUILTS/host/x86_64-w64-mingw32-4.8/
    BINPREFIX=x86_64-w64-mingw32-
    MINGW_GCC=$MINGW_ROOT/bin/${BINPREFIX}gcc
    if [ ! -e "$MINGW_GCC" ]; then
        panic "$MINGW_GCC does not exist"
    fi

    # NOTE(review): both branches set DEBIAN_NAME=mingw-w64; the branch
    # only decides whether to force 32-bit binaries.
    if [ "$HOST_ARCH" = "x86_64" -a "$TRY64" = "yes" ]; then
        DEBIAN_NAME=mingw-w64
    else
        # we are trying 32 bit anyway, so forcing it to avoid build issues
        force_32bit_binaries
        DEBIAN_NAME=mingw-w64
    fi
}
+
+# Check there is a working cross-toolchain installed.
+#
+# $1: install directory for mingw/darwin wrapper toolchain
+#
+# NOTE: Build scripts need to call this function to create MinGW wrappers,
+# even if they aren't doing a "Canadian" cross-compile with different build,
+# host, and target systems.
+#
+# Reads MINGW/DARWIN to decide which wrapper set to generate, creates a
+# wrapper toolchain directory under "$1", prepends it to PATH, and sets
+# CROSS_GCC, BINPREFIX, DEBIAN_NAME and CROSS_WRAP_DIR as side effects.
+prepare_canadian_toolchain ()
+{
+    # Nothing to do for a plain native (non-cross) build.
+    if [ "$MINGW" != "yes" -a "$DARWIN" != "yes" ]; then
+        return
+    fi
+    CROSS_GCC=
+    if [ "$MINGW" = "yes" ]; then
+        find_mingw_toolchain
+        CROSS_GCC=$MINGW_GCC
+    else
+        # Darwin cross builds require the caller to provide the toolchain
+        # location explicitly via DARWIN_TOOLCHAIN.
+        if [ -z "$DARWIN_TOOLCHAIN" ]; then
+            echo "Please set DARWIN_TOOLCHAIN to darwin cross-toolchain"
+            exit 1
+        fi
+        if [ ! -f "${DARWIN_TOOLCHAIN}-gcc" ]; then
+            echo "darwin cross-toolchain $DARWIN_TOOLCHAIN-gcc doesn't exist"
+            exit 1
+        fi
+        if [ "$HOST_ARCH" = "x86_64" -a "$TRY64" = "yes" ]; then
+            BINPREFIX=x86_64-apple-darwin-
+            DEBIAN_NAME=darwin64
+            HOST_CFLAGS=$HOST_CFLAGS" -m64"
+        else
+            force_32bit_binaries
+            BINPREFIX=i686-apple-darwin-
+            DEBIAN_NAME=darwin32
+            HOST_CFLAGS=$HOST_CFLAGS" -m32"
+        fi
+        CROSS_GCC=${DARWIN_TOOLCHAIN}-gcc
+        probe_darwin_sdk
+    fi
+
+    # Create a wrapper toolchain, and prepend its dir to our PATH
+    CROSS_WRAP_DIR="$1"/$DEBIAN_NAME-wrapper
+    rm -rf "$CROSS_WRAP_DIR"
+    mkdir -p "$CROSS_WRAP_DIR"
+
+    # Some Makefiles shell out to the macOS 'sw_vers' tool; provide a
+    # minimal stand-in that only answers -productVersion.
+    if [ "$DARWIN" = "yes" ] ; then
+        cat > "$CROSS_WRAP_DIR/sw_vers" <<EOF
+#!/bin/sh
+# Tiny utility for the real sw_vers some Makefiles need
+case \$1 in
+    -productVersion)
+        echo $DARWIN_MINVER
+        ;;
+    *)
+        echo "ERROR: Unknown switch \$1"
+        exit 1
+esac
+EOF
+    chmod 0755 "$CROSS_WRAP_DIR/sw_vers"
+    fi
+
+    # Strip the trailing "gcc" so DST_PREFIX can be combined with any tool
+    # name (gcc, g++, ar, ...) by gen-toolchain-wrapper.sh.
+    DST_PREFIX=${CROSS_GCC%gcc}
+    $NDK_BUILDTOOLS_PATH/gen-toolchain-wrapper.sh --src-prefix=$BINPREFIX --dst-prefix="$DST_PREFIX" "$CROSS_WRAP_DIR" \
+        --cflags="$HOST_CFLAGS" --cxxflags="$HOST_CFLAGS" --ldflags="$HOST_LDFLAGS"
+    # generate wrappers for BUILD toolchain
+    # this is required for mingw/darwin build to avoid tools canadian cross configuration issues
+    # 32-bit BUILD toolchain
+    LEGACY_TOOLCHAIN_DIR="$ANDROID_BUILD_TOP/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8"
+    $NDK_BUILDTOOLS_PATH/gen-toolchain-wrapper.sh --src-prefix=i386-linux-gnu- \
+            --cflags="-m32" --cxxflags="-m32" --ldflags="-m elf_i386" --asflags="--32" \
+            --dst-prefix="$LEGACY_TOOLCHAIN_DIR/bin/x86_64-linux-" "$CROSS_WRAP_DIR"
+    $NDK_BUILDTOOLS_PATH/gen-toolchain-wrapper.sh --src-prefix=i386-pc-linux-gnu- \
+            --cflags="-m32" --cxxflags="-m32" --ldflags="-m elf_i386" --asflags="--32" \
+            --dst-prefix="$LEGACY_TOOLCHAIN_DIR/bin/x86_64-linux-" "$CROSS_WRAP_DIR"
+    # 64-bit BUILD toolchain.  libbfd is still built in 32-bit.
+    $NDK_BUILDTOOLS_PATH/gen-toolchain-wrapper.sh --src-prefix=x86_64-linux-gnu- \
+            --dst-prefix="$LEGACY_TOOLCHAIN_DIR/bin/x86_64-linux-" "$CROSS_WRAP_DIR"
+    $NDK_BUILDTOOLS_PATH/gen-toolchain-wrapper.sh --src-prefix=x86_64-pc-linux-gnu- \
+            --dst-prefix="$LEGACY_TOOLCHAIN_DIR/bin/x86_64-linux-" "$CROSS_WRAP_DIR"
+    fail_panic "Could not create $DEBIAN_NAME wrapper toolchain in $CROSS_WRAP_DIR"
+
+    # 32-bit Windows toolchain (i686-w64-mingw32 -> x86_64-w64-mingw32 -m32)
+    local MINGW_TOOLCHAIN_DIR="$ANDROID_BUILD_TOP/prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8"
+    $NDK_BUILDTOOLS_PATH/gen-toolchain-wrapper.sh --src-prefix=i686-w64-mingw32- \
+            --cflags="-m32" --cxxflags="-m32" --ldflags="-m i386pe" --asflags="--32" \
+            --windres-flags="-F pe-i386" \
+            --dst-prefix="$MINGW_TOOLCHAIN_DIR/bin/x86_64-w64-mingw32-" "$CROSS_WRAP_DIR"
+
+    # Make the wrappers win over any real toolchains already on PATH.
+    export PATH=$CROSS_WRAP_DIR:$PATH
+    dump "Using $DEBIAN_NAME wrapper: $CROSS_WRAP_DIR/${BINPREFIX}gcc"
+}
+
+# Adjust host build settings.
+#
+# Forces 32-bit host binaries (which also rewrites HOST_TAG and related
+# globals) unless TRY64=yes, then defers to handle_canadian_build for any
+# --mingw/--darwin cross-build handling.
+handle_host ()
+{
+    if [ "$TRY64" != "yes" ]; then
+        force_32bit_binaries  # to modify HOST_TAG and others
+        HOST_BITS=32
+    fi
+    handle_canadian_build
+}
+
+# Prepare CC/CXX/STRIP and HOST_BITS for a host build.
+#
+# For --mingw/--darwin canadian builds this only sets HOST_BITS and
+# unsets CC/CXX (the cross toolchain from prepare_canadian_toolchain is
+# used instead). Otherwise it selects a legacy-compatible toolchain when
+# available and probes whether the compiler emits 32-bit code, appending
+# -m32/-m64 to CC/CXX as needed.
+#
+# Reads:  MINGW, DARWIN, TRY64, HOST_OS, HOST_TAG, ANDROID_BUILD_TOP,
+#         TMPC/TMPO/TMPL scratch files
+# Sets:   CC, CXX, STRIP, HOST_BITS (and HOST_TAG via force_32bit_binaries)
+prepare_common_build ()
+{
+    if [ "$MINGW" = "yes" -o "$DARWIN" = "yes" ]; then
+        if [ "$TRY64" = "yes" ]; then
+            HOST_BITS=64
+        else
+            HOST_BITS=32
+        fi
+        if [ "$MINGW" = "yes" ]; then
+            log "Generating $HOST_BITS-bit Windows binaries"
+        else
+            log "Generating $HOST_BITS-bit Darwin binaries"
+        fi
+        # Do *not* set CC and CXX when building the Windows/Darwin binaries in canadian build.
+        # Otherwise, the GCC configure/build script will mess that Canadian cross
+        # build in weird ways. Instead we rely on the toolchain detected or generated
+        # previously in prepare_canadian_toolchain.
+        unset CC CXX
+        return
+    fi
+
+    # On Linux, detect our legacy-compatible toolchain when in the Android
+    # source tree, and use it to force the generation of glibc-2.7 compatible
+    # binaries.
+    #
+    # We only do this if the CC variable is not defined to a given value
+    if [ -z "$CC" ]; then
+        LEGACY_TOOLCHAIN_DIR=
+        if [ "$HOST_OS" = "linux" ]; then
+            LEGACY_TOOLCHAIN_DIR="$ANDROID_BUILD_TOP/prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/bin"
+            LEGACY_TOOLCHAIN_PREFIX="$LEGACY_TOOLCHAIN_DIR/x86_64-linux-"
+        elif [ "$HOST_OS" = "darwin" ]; then
+            LEGACY_TOOLCHAIN_DIR="$ANDROID_BUILD_TOP/prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1/bin"
+            LEGACY_TOOLCHAIN_PREFIX="$LEGACY_TOOLCHAIN_DIR/i686-apple-darwin10-"
+        fi
+        # NOTE(review): on any other HOST_OS, LEGACY_TOOLCHAIN_PREFIX stays
+        # unset and CC becomes plain "gcc", which then fails the -e check
+        # below -- presumably only linux/darwin hosts are supported; confirm.
+        log "Forcing generation of $HOST_OS binaries with legacy toolchain"
+        CC="${LEGACY_TOOLCHAIN_PREFIX}gcc"
+        CXX="${LEGACY_TOOLCHAIN_PREFIX}g++"
+        if [ ! -e "${CC}" ]; then
+            panic "${CC} does not exist."
+        fi
+    fi
+
+    CC=${CC:-gcc}
+    CXX=${CXX:-g++}
+    STRIP=${STRIP:-strip}
+    case $HOST_TAG in
+        darwin-*)
+            probe_darwin_sdk
+            ;;
+    esac
+
+    # Force generation of 32-bit binaries on 64-bit systems.
+    # We used to test the value of $HOST_TAG for *-x86_64, but this is
+    # not sufficient on certain systems.
+    #
+    # For example, Snow Leopard can be booted with a 32-bit kernel, running
+    # a 64-bit userland, with a compiler that generates 64-bit binaries by
+    # default *even* though "gcc -v" will report --target=i686-apple-darwin10!
+    #
+    # So now, simply probe for the size of void* by performing a small runtime
+    # compilation test.
+    #
+    cat > $TMPC <<EOF
+    /* this test should fail if the compiler generates 64-bit machine code */
+    int test_array[1-2*(sizeof(void*) != 4)];
+EOF
+    log_n "Checking whether the compiler generates 32-bit binaries..."
+    log $CC $HOST_CFLAGS -c -o $TMPO $TMPC
+    $CC $HOST_CFLAGS -c -o $TMPO $TMPC >$TMPL 2>&1
+    if [ $? != 0 ] ; then
+        log "no"
+        if [ "$TRY64" != "yes" ]; then
+            # NOTE: We need to modify the definitions of CC and CXX directly
+            #        here. Just changing the value of CFLAGS / HOST_CFLAGS
+            #        will not work well with the GCC toolchain scripts.
+            CC="$CC -m32"
+            CXX="$CXX -m32"
+        fi
+    else
+        log "yes"
+        if [ "$TRY64" = "yes" ]; then
+            CC="$CC -m64"
+            CXX="$CXX -m64"
+        fi
+    fi
+
+    if [ "$TRY64" = "yes" ]; then
+        HOST_BITS=64
+    else
+        force_32bit_binaries  # to modify HOST_TAG and others
+        HOST_BITS=32
+    fi
+}
+
+# Prepare the environment for building host tools.
+#
+# Runs prepare_common_build, then for --mingw/--darwin cross builds points
+# CC/CXX/CPP/LD/AR/AS/RANLIB/STRIP at the $ABI_CONFIGURE_HOST-prefixed
+# wrapper tools (created by prepare_canadian_toolchain) and exports them
+# so configure scripts pick them up.
+prepare_host_build ()
+{
+    prepare_common_build
+
+    # Now deal with mingw or darwin
+    if [ "$MINGW" = "yes" -o "$DARWIN" = "yes" ]; then
+        handle_canadian_build
+        CC=$ABI_CONFIGURE_HOST-gcc
+        CXX=$ABI_CONFIGURE_HOST-g++
+        CPP=$ABI_CONFIGURE_HOST-cpp
+        LD=$ABI_CONFIGURE_HOST-ld
+        AR=$ABI_CONFIGURE_HOST-ar
+        AS=$ABI_CONFIGURE_HOST-as
+        RANLIB=$ABI_CONFIGURE_HOST-ranlib
+        STRIP=$ABI_CONFIGURE_HOST-strip
+        export CC CXX CPP LD AR AS RANLIB STRIP
+    fi
+}
+
+# Compute ABI_CONFIGURE_BUILD, the GNU configure "build" triple that
+# corresponds to the current HOST_TAG.
+#
+# Reads:  HOST_TAG
+# Sets:   ABI_CONFIGURE_BUILD
+# Exits with status 1 on an unsupported HOST_TAG.
+prepare_abi_configure_build ()
+{
+    # detect build tag
+    case $HOST_TAG in
+        linux-x86)
+            ABI_CONFIGURE_BUILD=i386-linux-gnu
+            ;;
+        linux-x86_64)
+            ABI_CONFIGURE_BUILD=x86_64-linux-gnu
+            ;;
+        darwin-x86)
+            ABI_CONFIGURE_BUILD=i686-apple-darwin
+            ;;
+        darwin-x86_64)
+            ABI_CONFIGURE_BUILD=x86_64-apple-darwin
+            ;;
+        windows)
+            ABI_CONFIGURE_BUILD=i686-pc-cygwin
+            ;;
+        *)
+            echo "ERROR: Unsupported HOST_TAG: $HOST_TAG"
+            echo "Please update 'prepare_abi_configure_build' in build/tools/prebuilt-common.sh"
+            # BUGFIX: previously fell through without exiting, leaving
+            # ABI_CONFIGURE_BUILD unset and letting the build continue
+            # with a broken configure triple. (The message also pointed
+            # at the stale function name 'prepare_host_flags'.)
+            exit 1
+            ;;
+    esac
+}
+
+# Prepare the environment for building target (device) binaries.
+#
+# Computes the configure build triple, defaults the host triple to it,
+# runs the common host setup, and then fixes up STRIP and the GMP ABI
+# for --mingw/--darwin canadian builds.
+#
+# Sets: ABI_CONFIGURE_HOST, HOST_GMP_ABI, STRIP (canadian case)
+prepare_target_build ()
+{
+    prepare_abi_configure_build
+
+    # By default, assume host == build
+    ABI_CONFIGURE_HOST="$ABI_CONFIGURE_BUILD"
+
+    prepare_common_build
+    HOST_GMP_ABI=$HOST_BITS
+
+    # Now handle the --mingw/--darwin flag
+    if [ "$MINGW" = "yes" -o "$DARWIN" = "yes" ] ; then
+        handle_canadian_build
+        STRIP=$ABI_CONFIGURE_HOST-strip
+        if [ "$MINGW" = "yes" ] ; then
+            # It turns out that we need to undefine this to be able to
+            # perform a canadian-cross build with mingw. Otherwise, the
+            # GMP configure scripts will not be called with the right options
+            HOST_GMP_ABI=
+        fi
+    fi
+}
+
+# $1: Toolchain name
+#
+parse_toolchain_name ()
+{
+    TOOLCHAIN=$1
+    if [ -z "$TOOLCHAIN" ] ; then
+        echo "ERROR: Missing toolchain name!"
+        exit 1
+    fi
+
+    ABI_CFLAGS_FOR_TARGET=
+    ABI_CXXFLAGS_FOR_TARGET=
+
+    # Determine ABI based on toolchain name
+    #
+    case "$TOOLCHAIN" in
+    arm-linux-androideabi-*)
+        ARCH="arm"
+        ABI="armeabi"
+        ABI_CONFIGURE_TARGET="arm-linux-androideabi"
+        ABI_CONFIGURE_EXTRA_FLAGS="--with-arch=armv5te"
+        ;;
+    arm-eabi-*)
+        ARCH="arm"
+        ABI="armeabi"
+        ABI_CONFIGURE_TARGET="arm-eabi"
+        ABI_CONFIGURE_EXTRA_FLAGS="--with-arch=armv5te --disable-gold --disable-libgomp"
+        ;;
+    aarch64-linux-android-*)
+        ARCH="arm64"
+        ABI="arm64-v8a"
+        ABI_CONFIGURE_TARGET="aarch64-linux-android"
+        # Reserve the platform register, x18. This should happen automatically
+        # with clang but we need to pass it manually when compiling with gcc.
+        ABI_CFLAGS_FOR_TARGET="-ffixed-x18"
+        ABI_CXXFLAGS_FOR_TARGET="-ffixed-x18"
+        ;;
+    x86-*)
+        ARCH="x86"
+        ABI=$ARCH
+        ABI_INSTALL_NAME="x86"
+        ABI_CONFIGURE_TARGET="i686-linux-android"
+        # Enable C++ exceptions, RTTI and GNU libstdc++ at the same time
+        # You can't really build these separately at the moment.
+        ABI_CFLAGS_FOR_TARGET="-fPIC"
+        ;;
+    x86_64-*)
+        ARCH="x86_64"
+        ABI=$ARCH
+        ABI_INSTALL_NAME="x86_64"
+        ABI_CONFIGURE_TARGET="x86_64-linux-android"
+        # Enable C++ exceptions, RTTI and GNU libstdc++ at the same time
+        # You can't really build these separately at the moment.
+        ABI_CFLAGS_FOR_TARGET="-fPIC"
+        ;;
+    mipsel*)
+        ARCH="mips"
+        ABI=$ARCH
+        ABI_INSTALL_NAME="mips"
+        ABI_CONFIGURE_TARGET="mipsel-linux-android"
+        # Set default to mips32
+        ABI_CONFIGURE_EXTRA_FLAGS="--with-arch=mips32"
+        # Enable C++ exceptions, RTTI and GNU libstdc++ at the same time
+        # You can't really build these separately at the moment.
+        # Add -fpic, because MIPS NDK will need to link .a into .so.
+        ABI_CFLAGS_FOR_TARGET="-fexceptions -fpic"
+        ABI_CXXFLAGS_FOR_TARGET="-frtti -fpic"
+        # Add --disable-fixed-point to disable fixed-point support
+        ABI_CONFIGURE_EXTRA_FLAGS="$ABI_CONFIGURE_EXTRA_FLAGS --disable-fixed-point"
+        ;;
+    mips64el*)
+        ARCH="mips64"
+        ABI=$ARCH
+        ABI_INSTALL_NAME="mips64"
+        ABI_CONFIGURE_TARGET="mips64el-linux-android"
+        # Set default to mips64r6
+        ABI_CONFIGURE_EXTRA_FLAGS="--with-arch=mips64r6"
+        # Enable C++ exceptions, RTTI and GNU libstdc++ at the same time
+        # You can't really build these separately at the moment.
+        # Add -fpic, because MIPS NDK will need to link .a into .so.
+        ABI_CFLAGS_FOR_TARGET="-fexceptions -fpic"
+        ABI_CXXFLAGS_FOR_TARGET="-frtti -fpic"
+        # Add --disable-fixed-point to disable fixed-point support
+        ABI_CONFIGURE_EXTRA_FLAGS="$ABI_CONFIGURE_EXTRA_FLAGS --disable-fixed-point"
+        ;;
+    * )
+        echo "Invalid toolchain specified. Expected (arm-linux-androideabi-*|arm-eabi-*|x86-*|mipsel*|mips64el*)"
+        echo ""
+        print_help
+        exit 1
+        ;;
+    esac
+
+    log "Targetting CPU: $ARCH"
+
+    GCC_VERSION=`expr -- "$TOOLCHAIN" : '.*-\([0-9x\.]*\)'`
+    log "Using GCC version: $GCC_VERSION"
+}
+
+# Return the host "tag" used to identify prebuilt host binaries.
+# NOTE: Handles the case where '$MINGW = yes' or '$DARWIN = yes'
+# Out: windows or windows-x86_64 for MinGW builds; otherwise the 64-bit
+#      tag (linux-x86_64 or darwin-x86_64) derived from HOST_TAG.
+get_prebuilt_host_tag ()
+{
+    local RET=$HOST_TAG
+    if [ "$MINGW" = "yes" ]; then
+        if [ "$TRY64" = "no" ]; then
+            RET=windows
+        else
+            RET=windows-x86_64
+        fi
+    fi
+    if [ "$DARWIN" = "yes" ]; then
+        RET=darwin-x86_64  # the case below normalizes any 32-bit tag
+    fi
+    # Normalize any linux-*/darwin-* tag to its 64-bit form; only 64-bit
+    # host prebuilts are shipped.
+    case $RET in
+        linux-*)
+            RET=linux-x86_64
+            ;;
+        darwin-*)
+            RET=darwin-x86_64
+            ;;
+    esac
+    echo $RET
+}
+
+# Return the executable suffix corresponding to host executables.
+# Echoes ".exe" when targeting Windows via MinGW, an empty string otherwise.
+get_prebuilt_host_exe_ext ()
+{
+    case $MINGW in
+        yes) echo ".exe" ;;
+        *)   echo "" ;;
+    esac
+}
+
+# Get library suffix for given ABI
+# $1: ABI
+# Return: .so or .bc
+get_lib_suffix_for_abi ()
+{
+    local ABI=$1
+    echo ".so"
+}
+
+# Convert an ABI name into an Architecture name
+# $1: ABI name
+# Result: Arch name
+convert_abi_to_arch ()
+{
+    local RET
+    local ABI=$1
+    case $ABI in
+        armeabi|armeabi-v7a)
+            RET=arm
+            ;;
+        x86|mips|x86_64|mips64)
+            RET=$ABI
+            ;;
+        mips32r6)
+            RET=mips
+            ;;
+        arm64-v8a)
+            RET=arm64
+            ;;
+        *)
+            >&2 echo "ERROR: Unsupported ABI name: $ABI, use one of: armeabi, armeabi-v7a, x86, mips, arm64-v8a, x86_64 or mips64"
+            exit 1
+            ;;
+    esac
+    echo "$RET"
+}
+
+# Take architecture name as input, and output the list of corresponding ABIs
+# Inverse for convert_abi_to_arch
+# $1: ARCH name
+# Out: ABI names list (comma-separated)
+convert_arch_to_abi ()
+{
+    local RET
+    local ARCH=$1
+    case $ARCH in
+        arm)
+            RET=armeabi,armeabi-v7a
+            ;;
+        x86|x86_64|mips|mips64)
+            RET=$ARCH
+            ;;
+        arm64)
+            RET=arm64-v8a
+            ;;
+        *)
+            >&2 echo "ERROR: Unsupported ARCH name: $ARCH, use one of: arm, x86, mips"
+            exit 1
+            ;;
+    esac
+    echo "$RET"
+}
+
+# Take a list of architecture names as input, and output the list of corresponding ABIs
+# $1: ARCH names list (separated by spaces or commas)
+# Out: ABI names list (comma-separated)
+# Exits with status 1 if any architecture is unknown (error printed by
+# convert_arch_to_abi).
+convert_archs_to_abis ()
+{
+    local RET
+    for ARCH in $(commas_to_spaces $@); do
+       ABI=$(convert_arch_to_abi $ARCH)
+       if [ -n "$ABI" ]; then
+          # Append with a comma separator after the first entry.
+          if [ -n "$RET" ]; then
+             RET=$RET",$ABI"
+          else
+             RET=$ABI
+          fi
+       else   # Error message is printed by convert_arch_to_abi
+          exit 1
+       fi
+    done
+    echo "$RET"
+}
+
+# Return the default toolchain binary path prefix for given architecture and gcc version
+# For example: arm 4.8 -> toolchains/<system>/arm-linux-androideabi-4.8/bin/arm-linux-androideabi-
+# $1: Architecture name
+# $2: GCC version
+# $3: optional, system name, defaults to $HOST_TAG
+# Out: relative binary prefix path (echoed)
+get_toolchain_binprefix_for_arch ()
+{
+    local NAME PREFIX DIR BINPREFIX
+    local SYSTEM=${3:-$(get_prebuilt_host_tag)}
+    NAME=$(get_toolchain_name_for_arch $1 $2)
+    PREFIX=$(get_default_toolchain_prefix_for_arch $1)
+    DIR=$(get_toolchain_install . $NAME $SYSTEM)
+    # Strip the leading "./" that get_toolchain_install produced from
+    # the "." root, so the result is a clean relative path.
+    BINPREFIX=${DIR#./}/bin/$PREFIX-
+    echo "$BINPREFIX"
+}
+
+# Return llvm toolchain binary path prefix for given llvm version
+# $1: optional, system name, defaults to $HOST_TAG
+# Out: absolute path to the prebuilt clang bin/ directory (echoed)
+get_llvm_toolchain_binprefix ()
+{
+    local NAME DIR BINPREFIX
+    local SYSTEM=${1:-$(get_prebuilt_host_tag)}
+    # Pinned clang prebuilt revision used for NDK builds.
+    local VERSION=r365631c
+    SYSTEM=${SYSTEM%_64} # Trim _64 suffix. We only have one LLVM.
+    BINPREFIX=$ANDROID_BUILD_TOP/prebuilts/clang/host/$SYSTEM/clang-$VERSION/bin
+    echo "$BINPREFIX"
+}
+
+# Return default API level for a given arch
+# This is the level used to build the toolchains.
+#
+# $1: Architecture name
+# Out: API level (echoed); reads the global FIRST_API64_LEVEL for
+#      64-bit architectures.
+get_default_api_level_for_arch ()
+{
+    # For now, always build the toolchain against API level 14 for 32-bit arch
+    # and API level $FIRST_API64_LEVEL for 64-bit arch
+    case $1 in
+        *64) echo $FIRST_API64_LEVEL ;;
+        *) echo 14 ;;
+    esac
+}
+
+# Return the default platform sysroot corresponding to a given architecture
+# This is the sysroot used to build the toolchain and other binaries like
+# the STLport libraries.
+# $1: Architecture name
+# Out: sysroot path relative to the NDK root (echoed)
+get_default_platform_sysroot_for_arch ()
+{
+    local ARCH=$1
+    local LEVEL=$(get_default_api_level_for_arch $ARCH)
+
+    # NOTE(review): this looks redundant -- get_default_api_level_for_arch
+    # already returns FIRST_API64_LEVEL for *64 arches, and the pattern
+    # below matches the same architectures; kept for safety.
+    if [ "$ARCH" != "${ARCH%%64*}" ] ; then
+        LEVEL=$FIRST_API64_LEVEL
+    fi
+    echo "platforms/android-$LEVEL/arch-$ARCH"
+}
+
+# Return the default platform sysroot corresponding to a given abi
+# $1: ABI
+get_default_platform_sysroot_for_abi ()
+{
+    local ARCH=$(convert_abi_to_arch $1)
+    $(get_default_platform_sysroot_for_arch $ARCH)
+}
+
+# Return the default libs dir corresponding to a given architecture
+# $1: Architecture name
+# Out: "lib64" for 64-bit Intel/MIPS, otherwise "lib". Note that arm64
+#      deliberately stays on "lib" until aarch64 is built to look for
+#      sysroot/usr/lib64.
+get_default_libdir_for_arch ()
+{
+    case $1 in
+        x86_64|mips64) echo "lib64" ;;
+        *)             echo "lib" ;;
+    esac
+}
+
+# Return the default libs dir corresponding to a given abi
+# $1: ABI
+# Out: "libr6" for mips32r6, otherwise the arch-level default from
+#      get_default_libdir_for_arch.
+get_default_libdir_for_abi ()
+{
+    case $1 in
+      mips32r6) echo "libr6" ;;
+      *)
+        # FIX: removed a redundant outer 'local ARCH' declaration that
+        # was immediately shadowed by this one.
+        local ARCH=$(convert_abi_to_arch $1)
+        echo "$(get_default_libdir_for_arch $ARCH)"
+        ;;
+    esac
+}
+
+# Return the host/build specific path for prebuilt toolchain binaries
+# relative to $1.
+#
+# $1: target root NDK directory
+# $2: toolchain name
+# $3: optional, host system name
+#
+# Out: "$1/toolchains/<system>/<toolchain>" (echoed)
+get_toolchain_install ()
+{
+    local NDK="$1"
+    # Shift away the root so "$@" forwards only the toolchain/system args.
+    shift
+    echo "$NDK/$(get_toolchain_install_subdir "$@")"
+}
+
+# Return the NDK-relative install subdirectory for a toolchain.
+# $1: toolchain name
+# $2: optional, host system name (defaults to get_prebuilt_host_tag)
+# Out: "toolchains/<system>/<toolchain>" (echoed)
+get_toolchain_install_subdir ()
+{
+    local SYSTEM=${2:-$(get_prebuilt_host_tag)}
+    echo "toolchains/$SYSTEM/$1"
+}
+
+# Return the relative install prefix for prebuilt host
+# executables (relative to the NDK top directory).
+#
+# Out: relative path to prebuilt install prefix
+get_prebuilt_install_prefix ()
+{
+    printf '%s\n' "host-tools"
+}
+
+# Return the relative path of an installed prebuilt host
+# executable.
+#
+# $1: executable name
+# Out: path to prebuilt host executable, relative, with the host
+#      suffix (e.g. ".exe") appended when targeting Windows.
+get_prebuilt_host_exec ()
+{
+    local PREFIX EXE
+    PREFIX=$(get_prebuilt_install_prefix)
+    EXE=$(get_prebuilt_host_exe_ext)
+    echo "$PREFIX/bin/$1$EXE"
+}
+
+# Return the name of a given host executable
+# $1: executable base name
+# Out: executable name, with optional suffix (e.g. .exe for windows)
+get_host_exec_name ()
+{
+    local EXE=$(get_prebuilt_host_exe_ext)
+    echo "$1$EXE"
+}
+
+# Return the directory where host-specific binaries are installed.
+# $1: target root NDK directory
+# Out: "$1/host-tools" (echoed)
+get_host_install ()
+{
+    echo "$1/$(get_prebuilt_install_prefix)"
+}
+
+# Set the toolchain target NDK location.
+# this sets TOOLCHAIN_PATH and TOOLCHAIN_PREFIX
+# $1: target NDK path
+# $2: toolchain name
+# Reads ABI_CONFIGURE_TARGET (set earlier by parse_toolchain_name).
+set_toolchain_ndk ()
+{
+    TOOLCHAIN_PATH=`get_toolchain_install "$1" $2`
+    log "Using toolchain path: $TOOLCHAIN_PATH"
+
+    TOOLCHAIN_PREFIX=$TOOLCHAIN_PATH/bin/$ABI_CONFIGURE_TARGET
+    log "Using toolchain prefix: $TOOLCHAIN_PREFIX"
+}
+
+# Check that a toolchain is properly installed at a target NDK location
+#
+# $1: target root NDK directory
+# $2: toolchain name
+#
+# Exits with status 1 if the toolchain directory is missing; otherwise
+# calls set_toolchain_ndk to publish TOOLCHAIN_PATH/TOOLCHAIN_PREFIX.
+check_toolchain_install ()
+{
+    TOOLCHAIN_PATH=`get_toolchain_install "$1" $2`
+    if [ ! -d "$TOOLCHAIN_PATH" ] ; then
+        echo "ERROR: Cannot find directory '$TOOLCHAIN_PATH'!"
+        # NOTE(review): this message interpolates $NDK_DIR, which is not
+        # set in this function -- presumably a global set by the caller;
+        # '$1' may be what was intended. Confirm.
+        echo "       Toolchain '$2' not installed in '$NDK_DIR'!"
+        echo "       Ensure that the toolchain has been installed there before."
+        exit 1
+    fi
+
+    set_toolchain_ndk $1 $2
+}
+
+# Validate a toolchain source directory.
+# $1: toolchain source directory
+# Exits with status 1 when the path is empty, not a directory, or does
+# not look like the top of a toolchain source tree (missing
+# build/configure or gcc/).
+check_toolchain_src_dir ()
+{
+    local SRC_DIR="$1"
+    if [ -z "$SRC_DIR" ]; then
+        echo "ERROR: Please provide the path to the toolchain source tree. See --help"
+        exit 1
+    fi
+
+    if [ ! -d "$SRC_DIR" ]; then
+        echo "ERROR: Not a directory: '$SRC_DIR'"
+        exit 1
+    fi
+
+    # Sanity markers that identify the top of a toolchain checkout.
+    if [ ! -f "$SRC_DIR/build/configure" -o ! -d "$SRC_DIR/gcc" ]; then
+        echo "ERROR: Either the file $SRC_DIR/build/configure or"
+        echo "       the directory $SRC_DIR/gcc does not exist."
+        echo "This is not the top of a toolchain tree: $SRC_DIR"
+        exit 1
+    fi
+}
+
+# Write a repo.prop file (project name -> git SHA) into a directory.
+# $1: destination directory
+# Copies the build server's $DIST_DIR/repo.prop when present, otherwise
+# generates one locally with 'repo forall' from $ANDROID_NDK_ROOT.
+make_repo_prop () {
+    local OUT_PATH="$1/repo.prop"
+
+    # The build server generates a repo.prop file that contains the current SHAs
+    # of each project.
+    if [ -f $DIST_DIR/repo.prop ]; then
+        cp $DIST_DIR/repo.prop $OUT_PATH
+    else
+        # Generate our own if we're building locally.
+        pushd $ANDROID_NDK_ROOT
+        repo forall \
+            -c 'echo $REPO_PROJECT $(git rev-parse HEAD)' > $OUT_PATH
+        popd
+    fi
+}
+
+#
+# Define HOST_TAG32, as the 32-bit version of HOST_TAG
+# We do this by replacing an -x86_64 suffix by -x86
+HOST_TAG32=$HOST_TAG
+case $HOST_TAG32 in
+    *-x86_64)
+        # ${HOST_TAG%%_64} strips the trailing "_64" suffix,
+        # e.g. linux-x86_64 -> linux-x86.
+        HOST_TAG32=${HOST_TAG%%_64}
+        ;;
+esac
diff --git a/build/tools/pylintrc b/build/tools/pylintrc
new file mode 120000
index 0000000..e242284
--- /dev/null
+++ b/build/tools/pylintrc
@@ -0,0 +1 @@
+../../pylintrc
\ No newline at end of file
diff --git a/build/tools/toolchain-licenses/COPYING b/build/tools/toolchain-licenses/COPYING
new file mode 100644
index 0000000..623b625
--- /dev/null
+++ b/build/tools/toolchain-licenses/COPYING
@@ -0,0 +1,340 @@
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+     51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year  name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
diff --git a/build/tools/toolchain-licenses/COPYING.LIB b/build/tools/toolchain-licenses/COPYING.LIB
new file mode 100644
index 0000000..2d2d780
--- /dev/null
+++ b/build/tools/toolchain-licenses/COPYING.LIB
@@ -0,0 +1,510 @@
+
+                  GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+	51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL.  It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+                            Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+  This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it.  You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations
+below.
+
+  When we speak of free software, we are referring to freedom of use,
+not price.  Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+  To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights.  These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+  For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you.  You must make sure that they, too, receive or can get the source
+code.  If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it.  And you must show them these terms so they know their rights.
+
+  We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+  To protect each distributor, we want to make it very clear that
+there is no warranty for the free library.  Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+  Finally, software patents pose a constant threat to the existence of
+any free program.  We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder.  Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+  Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License.  This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License.  We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+  When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library.  The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom.  The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+  We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License.  It also provides other free software developers Less
+of an advantage over competing non-free programs.  These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries.  However, the Lesser license provides advantages in certain
+special circumstances.
+
+  For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it
+becomes a de-facto standard.  To achieve this, non-free programs must
+be allowed to use the library.  A more frequent case is that a free
+library does the same job as widely used non-free libraries.  In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+  In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software.  For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+  Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.  Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library".  The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+                  GNU LESSER GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+  A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+  The "Library", below, refers to any such software library or work
+which has been distributed under these terms.  A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language.  (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+  "Source code" for a work means the preferred form of the work for
+making modifications to it.  For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control
+compilation and installation of the library.
+
+  Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it).  Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+  1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+  You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+  2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) The modified work must itself be a software library.
+
+    b) You must cause the files modified to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    c) You must cause the whole of the work to be licensed at no
+    charge to all third parties under the terms of this License.
+
+    d) If a facility in the modified Library refers to a function or a
+    table of data to be supplied by an application program that uses
+    the facility, other than as an argument passed when the facility
+    is invoked, then you must make a good faith effort to ensure that,
+    in the event an application does not supply such function or
+    table, the facility still operates, and performs whatever part of
+    its purpose remains meaningful.
+
+    (For example, a function in a library to compute square roots has
+    a purpose that is entirely well-defined independent of the
+    application.  Therefore, Subsection 2d requires that any
+    application-supplied function or table used by this function must
+    be optional: if the application does not supply it, the square
+    root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library.  To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License.  (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.)  Do not make any other change in
+these notices.
+
+  Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+  This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+  4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+  If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library".  Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+  However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library".  The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+  When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library.  The
+threshold for this to be true is not precisely defined by law.
+
+  If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work.  (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+  Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+  6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+  You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License.  You must supply a copy of this License.  If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License.  Also, you must do one
+of these things:
+
+    a) Accompany the work with the complete corresponding
+    machine-readable source code for the Library including whatever
+    changes were used in the work (which must be distributed under
+    Sections 1 and 2 above); and, if the work is an executable linked
+    with the Library, with the complete machine-readable "work that
+    uses the Library", as object code and/or source code, so that the
+    user can modify the Library and then relink to produce a modified
+    executable containing the modified Library.  (It is understood
+    that the user who changes the contents of definitions files in the
+    Library will not necessarily be able to recompile the application
+    to use the modified definitions.)
+
+    b) Use a suitable shared library mechanism for linking with the
+    Library.  A suitable mechanism is one that (1) uses at run time a
+    copy of the library already present on the user's computer system,
+    rather than copying library functions into the executable, and (2)
+    will operate properly with a modified version of the library, if
+    the user installs one, as long as the modified version is
+    interface-compatible with the version that the work was made with.
+
+    c) Accompany the work with a written offer, valid for at least
+    three years, to give the same user the materials specified in
+    Subsection 6a, above, for a charge no more than the cost of
+    performing this distribution.
+
+    d) If distribution of the work is made by offering access to copy
+    from a designated place, offer equivalent access to copy the above
+    specified materials from the same place.
+
+    e) Verify that the user has already received a copy of these
+    materials or that you have already sent this user a copy.
+
+  For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it.  However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+  It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system.  Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+  7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+    a) Accompany the combined library with a copy of the same work
+    based on the Library, uncombined with any other library
+    facilities.  This must be distributed under the terms of the
+    Sections above.
+
+    b) Give prominent notice with the combined library of the fact
+    that part of it is a work based on the Library, and explaining
+    where to find the accompanying uncombined form of the same work.
+
+  8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License.  Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License.  However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+  9. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Library or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+  10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+  11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply, and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License
+may add an explicit geographical distribution limitation excluding those
+countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation.  If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+  14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission.  For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this.  Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+                            NO WARRANTY
+
+  15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU.  SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+           How to Apply These Terms to Your New Libraries
+
+  If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change.  You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms
+of the ordinary General Public License).
+
+  To apply these terms, attach the following notices to the library.
+It is safest to attach them to the start of each source file to most
+effectively convey the exclusion of warranty; and each file should
+have at least the "copyright" line and a pointer to where the full
+notice is found.
+
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or
+your school, if any, to sign a "copyright disclaimer" for the library,
+if necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the
+  library `Frob' (a library for tweaking knobs) written by James
+  Random Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+  Ty Coon, President of Vice
+
+That's all there is to it!
+
+
diff --git a/build/tools/toolchain-licenses/COPYING.RUNTIME b/build/tools/toolchain-licenses/COPYING.RUNTIME
new file mode 100644
index 0000000..e1b3c69
--- /dev/null
+++ b/build/tools/toolchain-licenses/COPYING.RUNTIME
@@ -0,0 +1,73 @@
+GCC RUNTIME LIBRARY EXCEPTION
+
+Version 3.1, 31 March 2009
+
+Copyright (C) 2009 Free Software Foundation, Inc. <http://fsf.org/>
+
+Everyone is permitted to copy and distribute verbatim copies of this
+license document, but changing it is not allowed.
+
+This GCC Runtime Library Exception ("Exception") is an additional
+permission under section 7 of the GNU General Public License, version
+3 ("GPLv3"). It applies to a given file (the "Runtime Library") that
+bears a notice placed by the copyright holder of the file stating that
+the file is governed by GPLv3 along with this Exception.
+
+When you use GCC to compile a program, GCC may combine portions of
+certain GCC header files and runtime libraries with the compiled
+program. The purpose of this Exception is to allow compilation of
+non-GPL (including proprietary) programs to use, in this way, the
+header files and runtime libraries covered by this Exception.
+
+0. Definitions.
+
+A file is an "Independent Module" if it either requires the Runtime
+Library for execution after a Compilation Process, or makes use of an
+interface provided by the Runtime Library, but is not otherwise based
+on the Runtime Library.
+
+"GCC" means a version of the GNU Compiler Collection, with or without
+modifications, governed by version 3 (or a specified later version) of
+the GNU General Public License (GPL) with the option of using any
+subsequent versions published by the FSF.
+
+"GPL-compatible Software" is software whose conditions of propagation,
+modification and use would permit combination with GCC in accord with
+the license of GCC.
+
+"Target Code" refers to output from any compiler for a real or virtual
+target processor architecture, in executable form or suitable for
+input to an assembler, loader, linker and/or execution
+phase. Notwithstanding that, Target Code does not include data in any
+format that is used as a compiler intermediate representation, or used
+for producing a compiler intermediate representation.
+
+The "Compilation Process" transforms code entirely represented in
+non-intermediate languages designed for human-written code, and/or in
+Java Virtual Machine byte code, into Target Code. Thus, for example,
+use of source code generators and preprocessors need not be considered
+part of the Compilation Process, since the Compilation Process can be
+understood as starting with the output of the generators or
+preprocessors.
+
+A Compilation Process is "Eligible" if it is done using GCC, alone or
+with other GPL-compatible software, or if it is done without using any
+work based on GCC. For example, using non-GPL-compatible Software to
+optimize any GCC intermediate representations would not qualify as an
+Eligible Compilation Process.
+
+1. Grant of Additional Permission.
+
+You have permission to propagate a work of Target Code formed by
+combining the Runtime Library with Independent Modules, even if such
+propagation would otherwise violate the terms of GPLv3, provided that
+all Target Code was generated by Eligible Compilation Processes. You
+may then convey such a combination under terms of your choice,
+consistent with the licensing of the Independent Modules.
+
+2. No Weakening of GCC Copyleft.
+
+The availability of this Exception does not imply any general
+presumption that third-party software is unaffected by the copyleft
+requirements of the license of GCC.
+
diff --git a/build/tools/toolchain-licenses/COPYING3 b/build/tools/toolchain-licenses/COPYING3
new file mode 100644
index 0000000..94a9ed0
--- /dev/null
+++ b/build/tools/toolchain-licenses/COPYING3
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<http://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/build/tools/toolchain-licenses/COPYING3.LIB b/build/tools/toolchain-licenses/COPYING3.LIB
new file mode 100644
index 0000000..fc8a5de
--- /dev/null
+++ b/build/tools/toolchain-licenses/COPYING3.LIB
@@ -0,0 +1,165 @@
+		   GNU LESSER GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+  This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+  0. Additional Definitions. 
+
+  As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+  "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+  An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+  A "Combined Work" is a work produced by combining or linking an
+Application with the Library.  The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+  The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+  The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+  1. Exception to Section 3 of the GNU GPL.
+
+  You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+  2. Conveying Modified Versions.
+
+  If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+   a) under this License, provided that you make a good faith effort to
+   ensure that, in the event an Application does not supply the
+   function or data, the facility still operates, and performs
+   whatever part of its purpose remains meaningful, or
+
+   b) under the GNU GPL, with none of the additional permissions of
+   this License applicable to that copy.
+
+  3. Object Code Incorporating Material from Library Header Files.
+
+  The object code form of an Application may incorporate material from
+a header file that is part of the Library.  You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+   a) Give prominent notice with each copy of the object code that the
+   Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the object code with a copy of the GNU GPL and this license
+   document.
+
+  4. Combined Works.
+
+  You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+   a) Give prominent notice with each copy of the Combined Work that
+   the Library is used in it and that the Library and its use are
+   covered by this License.
+
+   b) Accompany the Combined Work with a copy of the GNU GPL and this license
+   document.
+
+   c) For a Combined Work that displays copyright notices during
+   execution, include the copyright notice for the Library among
+   these notices, as well as a reference directing the user to the
+   copies of the GNU GPL and this license document.
+
+   d) Do one of the following:
+
+       0) Convey the Minimal Corresponding Source under the terms of this
+       License, and the Corresponding Application Code in a form
+       suitable for, and under terms that permit, the user to
+       recombine or relink the Application with a modified version of
+       the Linked Version to produce a modified Combined Work, in the
+       manner specified by section 6 of the GNU GPL for conveying
+       Corresponding Source.
+
+       1) Use a suitable shared library mechanism for linking with the
+       Library.  A suitable mechanism is one that (a) uses at run time
+       a copy of the Library already present on the user's computer
+       system, and (b) will operate properly with a modified version
+       of the Library that is interface-compatible with the Linked
+       Version. 
+
+   e) Provide Installation Information, but only if you would otherwise
+   be required to provide such information under section 6 of the
+   GNU GPL, and only to the extent that such information is
+   necessary to install and execute a modified version of the
+   Combined Work produced by recombining or relinking the
+   Application with a modified version of the Linked Version. (If
+   you use option 4d0, the Installation Information must accompany
+   the Minimal Corresponding Source and Corresponding Application
+   Code. If you use option 4d1, you must provide the Installation
+   Information in the manner specified by section 6 of the GNU GPL
+   for conveying Corresponding Source.)
+
+  5. Combined Libraries.
+
+  You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+   a) Accompany the combined library with a copy of the same work based
+   on the Library, uncombined with any other library facilities,
+   conveyed under the terms of this License.
+
+   b) Give prominent notice with the combined library that part of it
+   is a work based on the Library, and explaining where to find the
+   accompanying uncombined form of the same work.
+
+  6. Revised Versions of the GNU Lesser General Public License.
+
+  The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+  Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+  If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/checkbuild.py b/checkbuild.py
index f724875..a9037a2 100755
--- a/checkbuild.py
+++ b/checkbuild.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2018 The Android Open Source Project
 #
@@ -14,14 +14,53 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Shortcut for ndk/checkbuild.py."""
-import ndk.checkbuild
+"""Shortcut for ndk/checkbuild.py.
+
+Differs from do_checkbuild.py because it launches a new Python interpreter,
+allowing this script to bootstrap our build with a specific version of Python.
+"""
+import argparse
+import logging
+import os
+import subprocess
+import sys
+
+from bootstrap import bootstrap
 
 
-def main() -> None:
-    """Trampoline into the builder defined in the ndk package."""
-    ndk.checkbuild.main()
+THIS_DIR = os.path.realpath(os.path.dirname(__file__))
 
 
-if __name__ == "__main__":
+def parse_args():
+    """Parses and returns command line arguments."""
+    # Don't add help because it inhibits the real checkbuild.py's --help.
+    parser = argparse.ArgumentParser(add_help=False)
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='count',
+        dest='verbosity',
+        default=0,
+        help='Increase logging verbosity.')
+    return parser.parse_known_args()
+
+
+def main():
+    """Program entry point.
+
+    Bootstraps the real checkbuild wrapper, do_checkbuild.py.
+    """
+    args, _ = parse_args()
+
+    if args.verbosity >= 2:
+        logging.basicConfig(level=logging.DEBUG)
+    else:
+        logging.basicConfig(level=logging.INFO)
+
+    bootstrap()
+    subprocess.check_call(
+        ['python3', os.path.join(THIS_DIR, 'do_checkbuild.py')] + sys.argv[1:])
+
+
+if __name__ == '__main__':
     main()
diff --git a/build/cmake/hooks/pre/Android-Clang.cmake b/do_checkbuild.py
old mode 100644
new mode 100755
similarity index 64%
rename from build/cmake/hooks/pre/Android-Clang.cmake
rename to do_checkbuild.py
index 929a37b..001e463
--- a/build/cmake/hooks/pre/Android-Clang.cmake
+++ b/do_checkbuild.py
@@ -1,4 +1,6 @@
-# Copyright (C) 2020 The Android Open Source Project
+#!/usr/bin/env python
+#
+# Copyright (C) 2018 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,6 +13,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
+"""Shortcut for ndk/checkbuild.py.
 
-# This is a hook file that will be included by cmake at the beginning of
-# Modules/Platform/Android-Clang.cmake.
+Differs from the actual checkbuild.py in this directory because it uses the
+current Python interpreter.
+"""
+import ndk.checkbuild
+
+
+if __name__ == '__main__':
+    ndk.checkbuild.main()
diff --git a/docs/Architecture.md b/docs/Architecture.md
deleted file mode 100644
index 6d0958b..0000000
--- a/docs/Architecture.md
+++ /dev/null
@@ -1,199 +0,0 @@
-# Architecture
-
-The latest version of this document is available at
-https://android.googlesource.com/platform/ndk/+/master/docs/Architecture.md.
-
-The core NDK is the zip file that is built in this repository and distributed by
-the SDK manager. It bundles the outputs of several other projects into a package
-that is directly usable by app developers, and also includes a few projects
-maintained directly in this repository.
-
-More broadly, "the NDK" can refer to the C ABI exposed to apps by the OS (AKA
-"the platform").
-
-## Code map
-
-The code in the NDK repo (the "repo" repository, the meta-repo that was created
-with `repo init` and `repo sync`, the parent directory of this git repo) is
-organized as follows:
-
-### bionic
-
-The source for bionic, Android's libc (and friends). The sources and includes
-for building the CRT objects come from this repository.
-
-### development
-
-The repository itself is overly broad. We include it for the adb python package.
-
-### external
-
-Most third-party code lives in external. For example, this is where googletest
-and some of the vulkan code lives.
-
-### ndk
-
-The main NDK repository. This is where the build systems and the NDK's own build
-and test systems live. This directory is organized as:
-
-#### Main directory
-
-The main directory contains the entry points to the build (`checkbuild.py`) and
-test (`run_tests.py`) scripts, as well as Python configuration files like
-mypy.ini and pylintrc. The other loose files in this directory such as ndk-gdb
-are the sources for tools that are shipped in the NDK that should probably be
-moved into their own directory for clarity.
-
-#### bootstrap
-
-Python 2/3 library for bootstrapping our build and test tools with an up to date
-Python 3.
-
-#### build
-
-Contains the build systems shipped in the NDK:
-
-* CMake toolchain files
-* ndk-build
-* `make_standalone_toolchain.py`
-
-#### docs
-
-Documentation primarily for core NDK development. Some additional documentation
-lives here as well but most user documentation lives in google3.
-
-#### infra
-
-Some infrastructure scripts like a Dockerfile that can be used to build the NDK.
-
-#### meta
-
-Metadata for the NDK intended for consumption by external tools and build
-systems to avoid needing to hard code or infer properties like the minimum and
-maximum OS versions or ABIs supported.
-
-#### ndk
-
-The Python package used for building and testing the NDK. The top level
-`checkbuild.py` and `run_tests.py` scripts call into this package.
-
-#### samples
-
-Sample projects to use for non-automated testing.
-
-#### scripts
-
-Additional scripts used for NDK development and release processes. Some of these
-scripts may be unfinished or unused, but the development and release
-documentation will guide you to the correct ones.
-
-#### sources
-
-Sources for tools and libraries shipped in the NDK that are not maintained in a
-separate repository.
-
-#### tests
-
-The NDK's tests. See [Testing.md](Testing.md) for more information.
-
-#### wrap.sh
-
-Premade [wrap.sh](https://developer.android.com/ndk/guides/wrap-script) scripts
-for apps.
-
-### prebuilts
-
-Prebuilt toolchains and libraries used or shipped (or both) by the NDK. The LLVM
-toolchain we ship is in prebuilts/clang, and the sysroot is in prebuilts/ndk.
-
-### toolchain
-
-Sources for the toolchain and other build components. LLVM lives in
-toolchain/llvm-project.
-
-## Core NDK
-
-The NDK components can be loosely grouped into the toolchain (the compiler as
-well as its supporting tools and libraries), build systems, and support
-libraries.
-
-For more information, see the [Build System Maintainers] guide.
-
-[Build System Maintainers]: docs/BuildSystemMaintainers.md
-
-### Toolchain
-
-The NDK's toolchain is LLVM. This means the NDK uses Clang as its compiler and
-the rest of the LLVM suite for other tasks (LLD for linking, llvm-ar for static
-library creation, etc).
-
-The toolchain is delivered to the NDK in a prebuilt form via the prebuilts/clang
-repositories. The version of the toolchain to be used is defined (at the time of
-writing) by `ndk.toolchains.CLANG_VERSION`.
-
-Documentation for using the NDK toolchain can be found in the [Build System
-Maintainers] guide. Information on how to update and test the prebuilt toolchain
-in the NDK can be found in the [Toolchains](Toolchains.md) guide.
-
-### Build systems
-
-While the NDK is primarily a toolchain for building Android code, the package
-also includes some build system support.
-
-First, `$NDK/build/core` contains ndk-build. This is the NDK's home grown build
-system. The entry point for this build system is `$NDK/build/ndk-build` (or
-`$NDK/build/ndk-build.cmd`).
-
-A CMake toolchain file is included at
-`$NDK/build/cmake/android.toolchain.cmake`. This toolchain file configures some
-default behaviors and then delegates to the [built-in CMake NDK support], which
-in turn allows the NDK to customize some internal behaviors via the hooks in
-`$NDK/build/cmake/hooks`. For some configurations, CMake support for the NDK is
-entirely implemented in `android-legacy.toolchain.cmake`. Which toolchain is
-used by default depends on both the NDK and CMake version, as we will default to
-the legacy toolchain file when the new toolchain file has known regressions. To
-determine which behavior is the default for a given NDK, check the fallback
-condition in `android.toolchain.cmake`.
-
-[built-in CMake NDK support]: https://cmake.org/cmake/help/latest/manual/cmake-toolchains.7.html#cross-compiling-for-android
-
-`$NDK/build/tools/make_standalone_toolchain.py` is a tool which can create a
-redistributable toolchain that targets a single Android ABI and API level. As of
-NDK r19 it is unnecessary, as the installed toolchain may be invoked directly,
-but it remains for compatibility.
-
-Apps and Android libraries (AARs) are typically built by the Gradle using the
-Android Gradle Plugin (AGP). AGP uses `externalNativeBuild` tasks to delegate
-the native build to either CMake or ndk-build and then handles packaging the
-built libraries into the APK. Since the Android Gradle plugin is responsible for
-both Java and native code, is not included as part of the NDK.
-
-### Support libraries
-
-`sources/android` and `sources/third_party` contain modules that can be used in
-apps (gtest, cpufeatures, native\_app\_glue, etc) via `$(call
-import-module,$MODULE)` in ndk-build. CMake modules are not yet available.
-
-## The platform
-
-Most of what NDK users mean when they refer to "the NDK" is actually the C API
-surface that is exposed by the OS. These are present in what we consider the NDK
-(the zip file we ship) as header files and stub libraries in the sysroot.
-
-Each NDK contains a single set of headers for describing all the API levels it
-supports. This means that the same headers are used whether the user's
-`minSdkVersion` is 19 or 30, so APIs are annotated with
-`__attribute__((available))` so that the compiler can diagnose use of
-unavailable APIs.
-
-Stub libraries are provided per supported API level. The stub libraries matching
-the user's `minSdkVersion` are used at build time to ensure that apps only use
-symbols which are available (though in the future these may be [weak
-references](https://github.com/android/ndk/issues/837) to allow apps a more
-ergonomic method of conditionally accessing maybe-available APIs). The stub
-libraries are **not** packaged in the APK, but instead are loaded from the OS.
-
-Sysroot updates (new system APIs) are delivered to the NDK when an update is
-manually triggered. The platform build generates the sysroot, and that artifact
-is snapshot in prebuilts/ndk/platform. The prebuilt that is checked in is what
-will be shipped.
diff --git a/docs/BuildSystemMaintainers.md b/docs/BuildSystemMaintainers.md
index 2f93173..d23860a 100644
--- a/docs/BuildSystemMaintainers.md
+++ b/docs/BuildSystemMaintainers.md
@@ -21,14 +21,14 @@
 
 ## Introduction
 
-The NDK uses the [LLVM] family of tools for building C/C++ code. These include
-[Clang] for compilation, [LLD] for linking, and other [LLVM tools] for other
-tasks.
+The NDK uses [Clang] as its C/C++ compiler and [Binutils] for linking,
+archiving, and object file manipulation. Binutils provides both BFD and gold for
+linking. LLVM's [LLD] is also included for testing. AOSP uses LLD by default for
+most projects and the NDK is expected to move to it in the future.
 
+[Binutils]: https://www.gnu.org/software/binutils
 [Clang]: https://clang.llvm.org/
 [LLD]: https://lld.llvm.org/
-[LLVM tools]: https://llvm.org/docs/CommandGuide/
-[LLVM]: https://llvm.org/
 
 ### Architectures
 [Architectures]: #architectures
@@ -93,12 +93,12 @@
 
 Clang automatically enables NEON for all API levels. ARM devices without NEON
 are uncommon. To support non-NEON devices, pass `-mfpu=vfpv3-d16` when
-compiling. Alternatively, use the Play Console to [exclude CPUs] without NEON
+compiling. Alternatively, use the Play Console to [blacklist CPUs] without NEON
 to disallow your app from being installed on those devices.
 
 [Android CDD]: https://source.android.com/compatibility/cdd
 [NEON]: https://developer.arm.com/technologies/neon
-[exclude CPUs]: https://support.google.com/googleplay/android-developer/answer/7353455?hl=en
+[blacklist CPUs]: https://support.google.com/googleplay/android-developer/answer/7353455?hl=en
 
 ### OS Versions
 [OS Versions]: #os-versions
@@ -114,13 +114,11 @@
 to the application's `minSdkVersion`.
 
 The API level targeted by an NDK application determines which APIs will be
-exposed for use by the application. By default, APIs that are not present in the
-targeted API level cannot be linked directly, but may be accessed via `dlsym`.
-An NDK application running on a device with an API level lower than the target
-will often not load at all. If it does load, it may not behave as expected. This
-is not a supported configuration. This behavior can be altered by following the
-section about [weak symbols]. Be sure your users understand the implications of
-doing so.
+exposed for use by the application. APIs that are not present in the targeted
+API level cannot be linked directly, but may be accessed via `dlsym`. An NDK
+application running on a device with an API level lower than the target will
+often not load at all. If it does load, it may not behave as expected. This is
+not a supported configuration.
 
 The major/minor version number given to an Android OS has no meaning when it
 comes to determining its API level. See the table in the [Build numbers]
@@ -133,8 +131,7 @@
 20 should use API 19 for their NDK target.
 
 To programatically determine the list of supported API levels as well as aliases
-that are accepted by ndk-build and CMake, see `<NDK>/meta/platforms.json`. For
-ABI specific minimum supported API levels, see `<NDK>/meta/abis.json`.
+that are accepted by ndk-build and CMake, see `<NDK>/meta/platforms.json`.
 
 Note: In some contexts the API level may be referred to as a platform. In this
 document an API level is always an integer, and a platform takes the form of
@@ -152,38 +149,6 @@
 [Build numbers]: https://source.android.com/setup/start/build-numbers
 [Distribution dashboard]: https://developer.android.com/about/dashboards/
 [uses-sdk]: https://developer.android.com/guide/topics/manifest/uses-sdk-element
-[weak symbols]: #weak-symbols-for-api-definitions
-
-### Page sizes
-
-Android V will allow OEMs to ship arm64-v8a and x86_64 devices with 16KiB page
-sizes. Devices that use this configuration will not be able to run existing apps
-that use native code. To be compatible with these devices, applications will
-need to rebuild all their native code to be 16KiB aligned, and rewrite any code
-which assumes a specific page size. See [Support 16 KB page sizes] for details.
-
-Note: 16KiB compatible binaries are also compatible with 4KiB page devices. You
-do not need to build both 16KiB and 4KiB variants of your libraries.
-
-To minimize disruption, the default configuration for NDK r27 remains 4KiB page
-sizes. A future NDK (likely r28) will change the defaults. To support building
-16KiB compatible apps in your build system, do the following:
-
-1. When linking arm64-v8a or x86_64 code, set the linker's max-page-size to
-   16384: `-Wl,-z,max-page-size=16384`. This will increase the size of the
-   binaries.
-2. Define `__BIONIC_NO_PAGE_SIZE_MACRO` to configure libc to hide the
-   declaration of `PAGE_SIZE` from the build: `-D__BIONIC_NO_PAGE_SIZE_MACRO`.
-   There is no valid build-time constant for the page size in a world where
-   devices have varying page sizes. Runtime checks with `getpagesize()` are
-   required.
-
-Note that, for the time being, this only needs to be done for arm64-v8a. The
-x86_64 emulator (see [Support 16 KB page sizes] for details) will support larger
-page sizes for testing purposes, but there are no plans to change the page size
-for the 32-bit ABIs, and riscv64 does not support 16KiB page sizes at all.
-
-[Support 16 KB page sizes]: https://developer.android.com/guide/practices/page-sizes
 
 ## Clang
 
@@ -198,18 +163,12 @@
 being linked were generated from C++ files) and `clang++` should be used
 otherwise. Using `clang++` ensures that the C++ standard library is linked.
 
-When linking a shared library, the `-Wl,-soname,$NAME_OF_LIBRARY` argument is
-required. This is necessary to avoid the problems described in [this stack
-overflow post](https://stackoverflow.com/a/48291044/632035). For example, when
-building `libapp.so`, `-Wl,-soname,libapp.so` must be used.
-
 ### Target Selection
 
-[Cross-compilation] targets can be selected in one of two ways: by using
-the `--target` flag, or by using target-specific wrapper scripts.
+[Cross-compilation] targets can be selected in one of two ways.
 
-If possible, we recommend using the `--target` flag, which is described more
-fully in the [Clang User Manual]. The value passed is a Clang target
+First, the `--target` flag can be used (see the [Clang User Manual] for more
+details on Clang's supported arguments). The value passed is a Clang target
 triple suffixed with an Android API level. For example, to target API 26 for
 32-bit ARM, use `--target armv7a-linux-androideabi26`.
 
@@ -218,17 +177,18 @@
 ARMv5 and thumb code generation will result in Thumb-1 being generated rather
 than Thumb-2, which is less efficient.
 
-If not possible to use the `--target` flag, we supply wrapper scripts alongside
-the `clang` and `clang++` binaries, named `<triple><API-level>-clang` and
-`<triple><API-level>-clang++`. For example, to target API 26 32-bit ARM,
+Second, a target-specific Clang can be used. In addition to the `clang` and
+`clang++` binaries, there are also `<triple><API-level>-clang` and
+`<triple><API-level>-clang++` scripts. For example, to target API 26 32-bit ARM,
 invoke `armv7a-linux-androideabi26-clang` or
-`armv7a-linux-androideabi26-clang++` instead of `clang` or `clang++`. These
-wrappers come in two forms: Bash scripts (for Mac, Linux, Cygwin, and WSL) and
-Windows batch files (with `.cmd` extensions).
+`armv7a-linux-androideabi26-clang++` instead of `clang` or `clang++`.
 
-Note: For projects with many source files, the wrapper scripts may cause
-noticeable overhead, which is why we recommend using `--target`. The overhead
-is most significant on Windows, as `CreateProcess` is slower than `fork`.
+Note: Target specific Clangs are currently implemented as shell scripts. Linux
+and Mac NDKs have Bash scripts, Windows includes Bash scripts to support Cygwin
+and WSL but also batch scripts (with `.cmd` extensions) for Windows command
+line support. For large numbers of relatively small source files, the additional
+overhead caused by these scripts may be noticeably slower than using `--target`,
+especially on Windows where `CreateProcess` is slower than `fork`.
 
 For more information on Android targets, see the [Architectures] and [OS
 Versions] sections.
@@ -238,44 +198,60 @@
 
 ## Linkers
 
-The NDK uses LLD for linking. The linker is installed to
-`<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/<triple>-ld`.
+Gold is used by default for most architectures, but BFD is used for AArch64 as
+Gold emits broken debug information for that architecture (see [Issue 70838247]
+for more details).
 
 Note: It is usually not necessary to invoke the linkers directly since Clang
 will do so automatically. Clang will also automatically link CRT objects and
 default libraries and set up other target-specific options, so it is generally
 better to use Clang for linking.
 
+The default linkers are installed to
+`<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/<triple>-ld` and
+`<NDK>/toolchains/llvm/prebuilt/<host-tag>/<triple>/bin/ld`. To use BFD or gold
+explicitly, use `ld.bfd` or `ld.gold` from the same locations. `ld.lld` is not
+installed to the triple directory and is not triple-prefixed, but rather is only
+installed as `<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/ld.lld` because the
+one binary supports all ABIs.
+
+Warning: Using LLD with GNU `strip` or `objcopy` breaks RelRO. LLVM `strip` and
+`objcopy` must be used with LLD. See [Issue 843] and the [Binutils] section of
+this document for more information.
+
 [Issue 70838247]: https://issuetracker.google.com/70838247
+[Issue 843]: https://github.com/android-ndk/ndk/issues/843
 
 ## Binutils
 
-LLVM's binutils tools are installed to the NDK at
-`<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/llvm-<tool>`. These include but
-are not limited to:
+GNU Binutils tools are installed to
+`<NDK>/toolchains/llvm/prebuilt/<host-tag>/bin/<triple>-<tool>` and
+`<NDK>/toolchains/llvm/prebuilt/<host-tag>/<triple>/bin/<tool>`. These include
+but are not limited to:
 
-* llvm-ar
-* llvm-objcopy
-* llvm-objdump
-* llvm-readelf
-* llvm-strip
+ * ar
+ * as
+ * objcopy
+ * objdump
+ * readelf
+ * strip
 
-All LLVM tools are capable of handling every target architecture. Unlike Clang,
-no `-target` argument is required for these tools, so they should behave
-correctly when used as drop-in replacements for their GNU equivalents. Some
-tools may optionally accept a `-target` argument, but if omitted they will
-select the correct target based on the input files.
+For some of these tools, LLVM equivalents are available. They typically have the
+same name but are prefixed with `llvm-`. For example, `llvm-strip` is used
+instead of `<triple>-strip` or the `strip` binary from the triple-specific
+directory.
 
-Note that `llvm-as` is **not** an equivalent of GNU `as`, but rather a tool for
-assembling LLVM IR. If you are currently using `as` directly, you will need to
-migrate to using `clang` as a driver for building assembly.  See [Clang
-Migration Notes] for advice on fixing assembly to be LLVM compatible.
+Note: llvm-strip's `--strip-unneeded` is not currently pruning all that it
+should. As a workaround, `--strip-all` (which differs in behavior from GNU's
+`--strip-all`) can be used instead. See [Issue 1083] for more information.
 
-Note that by default `/usr/bin/as` is used by Clang if the
-`-fno-integrated-as` argument is used, which is almost certainly not
-what you want!
+[Issue 1083]: https://github.com/android/ndk/issues/1083
 
-[Clang Migration Notes]: ClangMigration.md
+Android is moving away from GNU Binutils in favor of LLVM tools. This is a work
+in progress, but it is likely that a future release of the NDK will deprecate
+and eventually remove GNU Binutils. For now, ensure that your build system works
+with `llvm-strip` and `llvm-objcopy` as they are required when using LLD ([Issue
+843]).
 
 ## Sysroot
 
@@ -309,72 +285,7 @@
 The entries in this file are a key/value pair that maps library names to the
 first API level the library is introduced.
 
-## Weak symbols for API definitions
-
-See [Issue 837].
-
-The Android APIs are exposed as strong symbols by default. This means that apps
-must not directly refer to any APIs that were not available in their
-`minSdkVersion`, even if they will not be called at runtime. The loader will
-reject any library with strong references to symbols that are not present at
-load time.
-
-It is possible to expose Android APIs as weak symbols to alter this behavior to
-more closely match the Java behavior, which many app developers are more
-familiar with. The loader will allow libraries with unresolved references to
-weak symbols to load, allowing those APIs to be safely called as long as they
-are only called when the API is available on the device. Absent APIs will have a
-`nullptr` address, so calling an unavailable API will segfault.
-
-Note: APIs that are guaranteed to be available in the `minSdkVersion` (the API
-level passed to Clang with `-target`) will always be strong references, even
-with this option enabled.
-
-This is not enabled by default because, unless used cautiously, this method is
-prone to deferring build failures to run-time (and only on older devices, since
-newer devices will have the API). The loader not prevent the library from
-loading, but the function's address will be `nullptr` if the API is not
-available (if the API is newer than the OS). The API availability should be
-checked with `__builtin_available` before making the call:
-
-```c++
-if (__builtin_available(android 33, *)) {
-  // Call some API that's only available in API 33+.
-} else {
-  // Use some fallback behavior, perhaps doing nothing.
-}
-```
-
-Clang offers some protections for this approach via `-Wunguarded-availability`,
-which will emit a warning unless the call to the API is guarded with
-`__builtin_available`.
-
-To enable this functionality, pass `-D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__`
-to Clang when compiling. We **strongly** recommend forcing
-`-Werror=unguarded-availability` when using this option.
-
-We recommend making the choice of weak or strong APIs an option in your build
-system. Most developers will likely prefer weak APIs as they are simpler than
-using `dlopen`/`dlsym`, and as long as `-Werror=unguarded-availability` is used,
-it should be safe. At the time of writing, the NDK's own build systems
-(ndk-build and CMake) use strong API references by default, but that may change
-in the future.
-
-Known issues and limitations:
-
-* Only symbols are affected, not libraries. The only way to conditionally depend
-  on a library that is not available in the app's `minSdkVersion` is with
-  `dlopen`. We do not know how to solve this in a backwards compatible manner.
-* APIs in bionic (libc, libm, libdl) are not currently supported. See the bug
-  for more information. If the source compatibility issues can be resolved, that
-  will change in a future NDK release.
-* Headers authored by third-parties (e.g. `vulkan.h`, which comes directly from
-  Khronos) are not supported. The implementation of this feature requires
-  annotation of all function declarations, and the upstream headers likely do
-  not contain those annotations. Solutions to this problem are being
-  investigated.
-
-[Issue 837]: https://github.com/android/ndk/issues/837
+[Issue 801]: https://github.com/android-ndk/ndk/issues/801
 
 ## STL
 
@@ -394,7 +305,9 @@
 There are version-specific libc++.so and libc++.a libraries installed to
 `<NDK>/sysroot/usr/lib/<triple>/<version>`. These are not true libraries but
 [implicit linker scripts]. They inform the linker how to properly link the STL
-for the given version. These scripts handle the inclusion of any libc++
+for the given version. Older OS versions may require that a compatibility
+library (libandroid_support) be linked with libc++ to provide APIs not available
+in those versions. These scripts also handle the inclusion of any libc++
 dependencies if necessary. Linker scripts should not be included in the APK.
 
 Build systems should prefer to let Clang link the STL. If not using Clang, the
@@ -402,11 +315,12 @@
 should only be used as a last resort.
 
 Note: Linking libc++ and its dependencies explicitly may be necessary to defend
-against exception unwinding bugs caused by improperly built dependencies (see
-[Issue 379]). If not dependent on stack unwinding (the usual reason being that
-the application does not make use of C++ exceptions) or if no dependencies were
-improperly built, this is not necessary. If needed, link the libraries as listed
-in the linker script and be sure to follow the instructions in [Unwinding].
+against exception unwinding bugs caused by improperly built dependencies on
+ARM32 (see [Issue 379]). If not dependent on stack unwinding (the usual reason
+being that the application does not make use of C++ exceptions) or if no
+dependencies were improperly built, this is not necessary. If needed, link the
+libraries as listed in the linker script and be sure to follow the instructions
+in [Unwinding].
 
 [Important Considerations]: https://developer.android.com/ndk/guides/cpp-support#important_considerations
 [Issue 379]: https://github.com/android-ndk/ndk/issues/379
@@ -440,9 +354,9 @@
 than Valgrind (roughly 50% performance compared to an unsanitized application).
 
 To use ASan, pass `-fsanitize=address` when both compiling and linking. The
-sanitizer runtime libraries are installed to `<clang resource dir>/lib/linux`.
-The Clang resource directory is given by `clang -print-resource-dir`. The
-library is named `libclang_rt.asan-<arch>-android.so`. This library must be
+sanitizer runtime libraries are installed to
+`<NDK>/toolchains/llvm/prebuilt/<host-tag>/lib64/clang/<clang-version>/lib/linux`.
+The library is named `libclang_rt.asan-<arch>-android.so`. This library must be
 included in the APK. A [wrap.sh] file must also be included in the APK. A
 premade wrap.sh file for ASan is installed to `<NDK>/wrap.sh`.
 
@@ -470,6 +384,12 @@
 builds PIE executables by default. If invoking the linker directly or not using
 Clang, use `-pie` when linking.
 
+Clang does not properly set the ARMv7 architecture for the non-integrated
+assembler. If using `-fno-integrated-as`, you must explicitly pass
+`-march=armv7-a` when compiling for 32-bit ARM. Note that by default Clang will
+use the integrated assembler, and this flag is not needed in that case. See
+[Issue 906].
+
 Android Studio's LLDB debugger uses a binary's build ID to locate debug
 information. To ensure that LLDB works with a binary, pass an option like
 `-Wl,--build-id=sha1` to Clang when linking. Other `--build-id=` modes are OK,
@@ -477,16 +397,9 @@
 version of LLDB doesn't recognize LLD's default 8-byte build ID. See [Issue
 885].
 
-The unwinder used for crash handling on Android devices prior to API 29 cannot
-correctly unwind binaries built with `-Wl,--rosegment`. This flag is enabled by
-default when using LLD, so if using LLD and targeting devices older than API 29
-you must pass `-Wl,--no-rosegment` when linking for correct stack traces in
-logcat. See [Issue 1196].
-
 [Issue 635]: https://github.com/android-ndk/ndk/issues/635
 [Issue 885]: https://github.com/android-ndk/ndk/issues/885
 [Issue 906]: https://github.com/android-ndk/ndk/issues/906
-[Issue 1196]: https://github.com/android/ndk/issues/1196
 [Position-independent executables]: https://en.wikipedia.org/wiki/Position-independent_code#Position-independent_executables
 
 ## Useful Arguments
@@ -502,6 +415,12 @@
 preserved. By default, only symbols in used sections will be included in the
 linked binary.
 
+If this behavior is not desired for your build system, ensure that these flags
+are at least used for `libgcc_real.a` (`libgcc.a` is a linker script, and
+`--exclude-libs` does not have any effect on the contents of linker scripts) and
+`libunwind.a` (libunwind is only used for ARM32). This is necessary to avoid
+unwinding bugs on ARM32. See [Unwinding] for more information.
+
 [visibility]: https://gcc.gnu.org/wiki/Visibility
 
 ### Controlling Binary Size
@@ -532,26 +451,6 @@
 
 [public symbols]: #dependency-management
 
-#### RELR and relocation packing
-
-Note that each of the flags below will prevent the library or executable from
-loading on older devices. If your `minSdkVersion` is at least the supported API
-level, these flags are typically beneficial. A future release of the NDK will
-likely enable this by default based on the `minSdkVersion` passed to Clang. See
-[Issue 909] for more information.
-
-Beginning with API level 23 it is possible to compress the relation data in
-libraries and executables. Libraries with large numbers of relocations will
-benefit from this. Enable with `-Wl,--pack-dyn-relocs=android` at link time.
-
-API level 28 adds support for relative relocations (RELR) which can further
-reduce the size of relocations. Enable with `-Wl,--pack-dyn-relocs=android+relr`
-at link time. API levels 28 and 29 predate the standardization of this feature
-in ELF, so for those API levels also pass `-Wl,--use-android-relr-tags` at link
-time.
-
-[Issue 909]: https://github.com/android/ndk/issues/909
-
 ### Helpful Warnings
 
 It is recommended that build systems promote the following warnings to errors.
@@ -597,56 +496,49 @@
 
 [FORTIFY in Android]: https://android-developers.googleblog.com/2017/04/fortify-in-android.html
 
-### Version script validation
-
-LLD will not raise any errors for symbols named in version scripts that are
-absent from the library. This is either a mistake in the version script, or a
-missing definition in the library. To have LLD diagnose these errors, pass
-`-Wl,--no-undefined-version` when linking.
-
 ## Common Issues
 
 ### Unwinding
 [Unwinding]: #unwinding
 
-The NDK uses LLVM's libunwind. libunwind is needed to provide C++ exception
-handling support and C's `__attribute__((cleanup))`. The unwinder is linked
-automatically by Clang, and is built with hidden visibility to avoid shared
-libraries re-exporting the unwind interface.
+For 32-bit ARM the NDK makes use of two unwinders: libgcc and LLVM's libunwind.
+libunwind is needed to provide C++ exception handling support. libgcc is needed
+to provide compiler runtime support and as such its unwinder is also seen by the
+linker.
 
-Until NDK r23, libgcc was the unwinder for all architectures other than 32-bit
-ARM, and even 32-bit ARM used libgcc to provide compiler runtime support.
-Libraries built with NDKs older than r23 by build systems that did not follow
-the advice in this document may re-export that incompatible unwinder. In this
-case those libraries can prevent the correct unwinder from being used by your
-build, resulting in crashes or incorrect behavior at runtime.
+These two unwinders are not ABI compatible but do use the same names, so caution
+is required to avoid ODR bugs. For 32-bit ARM, the libgcc.a in the NDK is a
+linker script that ensures that libunwind is linked before libgcc, causing the
+linker to prefer symbols from libunwind to those from libgcc.
 
-The best way to avoid this problem is to ensure all libraries in the application
-were built with NDK r23 or newer, but even libraries built by older NDKs are
-unlikely to have this problem.
+As these are static libraries, the symbols will be included in the linked
+binary. By default they will be linked with public visibility. If used in a
+build system that does not strictly adhere to only linking shared libraries
+after all objects and static libraries, the binary being linked may instead load
+these symbols from a shared library. If this library was built with the wrong
+unwinder, it is possible for one unwinder to call into the other. As they are
+not compatible, this will likely result in either a crash or a failed unwind. To
+avoid this problem, libraries should always be built with
+`-Wl,--exclude-libs,libgcc_real.a` and `-Wl,--exclude-libs,libunwind.a` (the
+latter is only necessary for 32-bit ARM) to ensure that unwind symbols are not
+re-exported from shared libraries.
 
-For build systems that want to protect their users against improperly built
-libraries, read on. **Neither ndk-build nor CMake make this effort.**
-
-To protect against improperly built libraries, build systems can ensure that
-shared libraries are always linked **after** static libraries, and explicitly
-link the unwinder between each group. The linker will prefer definitions that
-appear sooner in the link order, so libunwind appearing **before** the shared
-libraries will prevent the linker from considering the incompatible unwinder
-provided by the broken library. libunwind must be linked after other static
-libraries to provide the unwind interface to those static libraries.
-
-The following link order will protect against incorrectly built dependencies:
+Even with the above precautions, it is still possible for an improperly built
+external dependency to provide an incorrect unwind implementation as described
+in the above paragraph. The only way to guarantee protection against this for
+libraries built in your build system is to ensure that objects are linked in the
+following order:
 
  1. crtbegin
  2. object files
  3. static libraries
- 4. libunwind
+ 4. libgcc
  5. shared libraries
  6. crtend
 
 Unless using `-nostdlib` when linking, crtend and crtbegin will be linked
-automatically by Clang. libunwind can be manually linked with `-lunwind`.
+automatically by Clang. Linking libraries in the order above will require
+`-nostdlib++` when using libc++.
 
 ## Windows Specific Issues
 
diff --git a/docs/Building.md b/docs/Building.md
index 814fa95..6f41b08 100644
--- a/docs/Building.md
+++ b/docs/Building.md
@@ -6,26 +6,22 @@
 Both Linux and Windows NDKs are built on Linux machines. Windows host binaries
 are cross-compiled with MinGW.
 
-Building the NDK for Mac OS X requires at least 10.13.
+Building the NDK for Mac OS X requires at least 10.8.
 
 ## Prerequisites
 
-The first thing you need is the AOSP NDK repository. If you're new to using repo
-and gerrit, see [repo.md](repo.md) for tips. If you're already familiar with how
-to use repo and gerrit from other Android projects, you already know plenty :)
+* [AOSP NDK Repository](http://source.android.com/source/downloading.html)
+    * Check out the branch `master-ndk`
 
-Check out the branch `master-ndk`. Do this in a new directory.
+        ```bash
+        repo init -u https://android.googlesource.com/platform/manifest \
+            -b master-ndk
 
-```bash
-# For non-Googlers:
-repo init -u https://android.googlesource.com/platform/manifest -b master-ndk --partial-clone
-
-# Googlers, follow http://go/repo-init/master-ndk (select AOSP in the Host menu,
-# and uncheck the box for the git superproject). At time of writing, the correct
-# invocation is:
-repo init -u \
-    sso://android.git.corp.google.com/platform/manifest -b master-ndk --partial-clone
-```
+        # Googlers, use
+        repo init -u \
+            persistent-https://android.git.corp.google.com/platform/manifest \
+            -b master-ndk
+        ```
 
 If you wish to rebuild a given release of the NDK, the release branches can also
 be checked out. They're named `ndk-release-r${RELEASE}` for newer releases, but
@@ -48,136 +44,33 @@
 [Dockerfile]: ../infra/docker/Dockerfile
 [Android SDK]: https://developer.android.com/studio/index.html#downloads
 
-## Python environment setup
+## Building the NDK
 
-To set up your environment to use the correct versions of Python and Python
-packages, install [Poetry](https://python-poetry.org/) and then do the
-following.
-
-Whenever you set up a new NDK tree (after a fresh `repo init`, for example),
-configure the project to use our prebuilt Python instead of your system's. If on
-Mac, be sure to use the darwin-x86 version instead.
-
-```bash
-poetry env use ../prebuilts/python/linux-x86/bin/python3
-```
-
-The first time, and also anytime you sync because there might be new or updated
-dependencies, install the NDK dependencies to the virtualenv managed by poetry.
-
-```bash
-poetry install
-```
-
-Note: If `poetry install` hangs on Linux, try
-`PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring poetry install`.
-
-Spawn a new shell using the virtualenv that Poetry created. You could instead
-run NDK commands with the `poetry run` prefix (e.g. `poetry run
-./checkbuild.py`), but it's simpler to just spawn a new shell. Plus, if it's in
-your environment your editor can use it.
-
-```bash
-poetry shell
-```
-
-### macOS workarounds
-
-On macOS you may not be able to use the Python that is in prebuilts because it
-does not support the ssl module (which poetry itself needs). Until the Python
-prebuilt includes that module, do the following to use a different Python:
-
-First time setup: ensure that you have pyenv installed. You may need to install
-homebrew (http://go/homebrew for Googlers, else https://brew.sh/).
-
-```
-$ brew update && brew upgrade pyenv
-```
-
-Then set up your tree to use the correct version of Python. This setting will
-apply to the directory it is run in, so you will need to do it per NDK tree.
-
-```
-# From the //ndk directory of your NDK tree:
-$ ../prebuilts/python/darwin-x86/bin/python3 --version
-Python 3.11.4
-# We don't need to match the version exactly, just the major/minor version.
-$ pyenv install 3.11:latest
-$ pyenv local 3.11
-$ python --version
-Python 3.11.8
-$ poetry env use 3.11
-poetry install
-```
-
-Each time the NDK updates to a new version of Python, you'll need to repeat
-those steps. You may also need to remove the old poetry environment
-(`poetry env list` to get the name, `poetry env remove` to remove it).
-
-`checkbuild.py` and `run_tests.py` will complain when you try to use a Python
-that doesn't come from prebuilts by default. To suppress that, pass
-`--permissive-python-environment` when using those tools in this environment.
-
-## Build
-
-### For Linux or Darwin
+### For Linux or Darwin:
 
 ```bash
 $ python checkbuild.py
 ```
 
-If you get an error like the following:
-
-```
-Expected python to be $NDK_SRC/prebuilts/python/$HOST/bin/python3.9, but is ~/.cache/pypoetry/virtualenvs/$VENV/bin/python (/usr/bin/python3.9).
-```
-
-Your Poetry virualenv was misconfigured. It seems that `poetry env use` will not
-replace an existing virtualenv of the same major/minor version, so if you ran
-any poetry commands before `poetry env use`, your environment needs to be
-deleted and recreated.
+### For Windows, from Linux:
 
 ```bash
-$ poetry env remove $VENV
-$ poetry env use ../prebuilts/python/linux-x86/bin/python3
-$ poetry install
+$ python checkbuild.py --system windows64  # Or "windows", for a 32-bit host.
 ```
 
-If you get an error like the following:
-
-```
-Expected python to be $NDK_SRC/prebuilts/python/linux-x86/bin/python3.9, but is /usr/bin/python (/usr/bin/python3.9).
-```
-
-You ran checkbuild.py outside the poetry environment. Ensure that you've done
-the first time setup (`poetry env use` and `poetry install`, as above), then
-either run `poetry shell` to enter a new shell with the correct environment, or
-use `poetry run checkbuild.py`.
-
-If you get errors from the pythonlint task but it appears to only affect your
-machine, one of the linters you have installed is probably not the correct
-version. Run `poetry install` to sync your environment with the expected
-versions.
-
-### For Windows, from Linux
-
-```bash
-$ python checkbuild.py --system windows64
-```
-
-`checkbuild.py` will also build all of the NDK tests. This takes about 3x as
-long as building the NDK itself, so pass `--no-build-tests` to skip building the
-tests if you're iterating on build behavior or plan to rebuild only specific
-tests. Tests can be built later with `python run_tests.py --rebuild`.
+`checkbuild.py` will also build all of the NDK tests. This takes about as long
+as building the NDK itself, so pass `--no-build-tests` to skip building the
+tests. They can be built later with `python run_tests.py --rebuild`.
 
 Note: The NDK's build and test scripts are implemented in Python 3 (currently
-3.9). `checkbuild.py` will use a prebuilt Python, but `run_tests.py` does not do
-this yet. `run_tests.py` also can be run outside of a complete development
-environment (as it is when it is run on Windows), so a Python 3.9 virtualenv is
-recommended.
+3.6). `checkbuild.py` will bootstrap by building Python 3.6 from source before
+running, but `run_tests.py` does not do this yet. `run_tests.py` also can be run
+outside of a complete development environment (as it is when it is run on
+Windows), so a Python 3.6 virtualenv is recommended.
 
 ## Packaging
 
-Packaging uses `zip -9` so is extremely time consuming and disabled by default.
-Use the `--package` flag to force packaging locally. This is not required for
-local development and only needs to be used when testing packaging behavior.
+By default, `checkbuild.py` will also package the NDK. To skip the packaging
+step, use the `--no-package` flag. To avoid packaging an incomplete NDK,
+packaging will not be run if `--module` was passed unless `--force-package` was
+also provided.
diff --git a/docs/ClangMigration.md b/docs/ClangMigration.md
index 49ff635..8f4be4a 100644
--- a/docs/ClangMigration.md
+++ b/docs/ClangMigration.md
@@ -1,14 +1,26 @@
 # Clang Migration Notes
 
-NDK r17 was the last version to include GCC. If you're upgrading from an old NDK
-and need to migrate to Clang, this doc can help.
+The Android OS switched to clang several years ago. Future versions of
+the NDK will remove GCC, so the sooner you start testing your project
+with clang the better!
 
-If you maintain a custom build system, see the [Build System Maintainers]
-documentation.
+## How to switch to clang
 
-[Build System Maintainers]: ./BuildSystemMaintainers.md
+For `ndk-build`, remove lines setting `NDK_TOOLCHAIN` or
+`NDK_TOOLCHAIN_VERSION`.
 
-## `-Oz` versus `-Os`
+For cmake, remove lines setting `ANDROID_TOOLCHAIN`.
+
+For standalone toolchains, use the `clang`/`clang++` binaries instead of
+`gcc`/`g++`.
+
+For other build systems, ask the owners of that build system.
+
+## How to fix common problems
+
+When moving to Clang from GCC, you may notice some differences.
+
+### `-Oz` versus `-Os`
 
 [Clang Optimization Flags](https://clang.llvm.org/docs/CommandGuide/clang.html#code-generation-options)
 has the full details, but if you used `-Os` to optimize your
@@ -20,7 +32,7 @@
 size *and* performance improvements when moving to Clang compared to
 `-Os` with GCC.
 
-## `__attribute__((__aligned__))`
+### `__attribute__((__aligned__))`
 
 Normally the `__aligned__` attribute is given an explicit alignment,
 but with no value means “maximum alignment”. The interpretation of
@@ -32,7 +44,7 @@
 aligned. Most code should either specify an explicit alignment or use
 [alignas](http://en.cppreference.com/w/cpp/language/alignas) instead.
 
-## `-Bsymbolic`
+### `-Bsymbolic`
 
 When targeting Android (but no other platform), GCC passed
 [-Bsymbolic](ftp://ftp.gnu.org/old-gnu/Manuals/ld-2.9.1/html_node/ld_3.html)
@@ -81,78 +93,8 @@
 version scripts are an even more powerful mechanism for controlling
 exported symbols, but harder to use.
 
-## Assembler issues
+### `-fno-integrated-as`
 
-For many years the problem of adjusting inline assembler to work with
-LLVM could be punted down the road by using `-fno-integrated-as` to fall
-back to the GNU Assembler (GAS). With the removal of GNU binutils from
-the NDK, such issues will now need to be addressed. We’ve collected
-some of the most common issues and their solutions/workarounds here.
-
-### `.arch` or `.arch_extension` scope with `__asm__`
-GAS doesn’t scope `.arch` or `.arch_extension`, so you can have a global
-`__asm__(".arch foo")` that applies to the whole C/C++ source file,
-just like a bare `.arch` or `.arch_extension` directive would in a .S
-file. LLVM scopes these to the specific `__asm__` in which it occurs,
-so you’ll need to adapt your inline assembler, or build the whole file
-for the relevant arch variant.
-
-### ARM `ADRL`
-GAS lets you use the `ADRL` pseudoinstruction to get the address of
-something too far away for a regular `ADR` to reference. This means
-that it expands to two instructions, which LLVM doesn’t support,
-so you’ll need to use a macro something like this instead:
-```
-  .macro ADRL reg:req, label:req
-  add \reg, pc, #((\label - .L_adrl_\@) & 0xff00)
-  add \reg, \reg, #((\label - .L_adrl_\@) - ((\label - .L_adrl_\@) & 0xff00))
-  .L_adrl_\@:
-  .endm
-```
-
-### ARM assembler syntactical strictness
-While GAS supports the older divided and newer unified syntax (selectable
-via `.syntax unified` and `.syntax divided`), LLVM only supports the
-newer unified syntax.
-
-As an example of where this matters, `LDR` has an optional type and the
-optional condition code allowed on all instructions. GAS allows these
-to come in either order when using divided syntax, but LLVM only allows
-them in the canonical order given in the ARM instruction reference (which
-is what “unified” syntax means). So continuing this example, GAS
-accepts both `LDRBEQ` and `LDREQB`, but LLVM only accepts `LDRBEQ` (with
-the condition code at the end, as the instruction appears in the manual).
-
-Most humans usually use this order anyway, but you’ll have to rearrange
-any instructions that don’t use the canonical order.
-
-### ARM assembler implicit operands
-Some ARM instructions have restrictions that make some operands
-implicit. For example, the two target registers supplied to `LDREXD`
-must be consecutive. GAS would allow you to write `LDREXD R1, [R4]`
-because the other register _must_ be `R2`, but LLVM requires both
-registers to be explicitly stated, in this case `LDREXD R1, R2, [R4]`.
-
-### ARM `.arm` or `.code 32` alignment
-Switching from Thumb to ARM mode implicitly forces 4-byte alignment
-with GAS but doesn’t with LLVM. You may need to use an explicit
-`.align`/`.balign`/`.p2align` directive in such cases.
-
-### No `--defsym` command-line option
-GAS and LLVM implement their own conditional assembly mechanism with
-`.if`...`.endif` rather than the C preprocessor’s `#if`...`#endif`. The
-equivalent of `-DA=B` for `.if` is `-Wa,-defsym,A=B`, but GAS allowed
-`--defsym` instead of `-defsym`. LLVM requires `-defsym`.
-
-You might also prefer to just use the C preprocessor. If your assembly
-is in a .S file it is already being preprocessed. If your assembly
-is in a file with any other extension (including `.s` --- this is the
-difference between `.s` and `.S`), you’ll need to either rename it to
-`.S` or use the `-x assembler-with-cpp` flag to the compiler to override
-the file extension-based guess.
-
-### No `.func`/`.endfunc`
-GAS ignores a request for obsolete STABS debugging information to be
-emitted using `.func` and `.endfunc`. Neither GAS nor LLVM actually
-support STABS, but LLVM rejects these meaningless directives. The fix
-is simply to remove them.
+Especially for ARM and ARM64, Clang is much stricter about assembler
+rules than GCC/GAS. Use `-fno-integrated-as` if Clang reports errors in
+inline assembly or assembly files that you don't wish to modernize.
diff --git a/docs/Onboarding.md b/docs/Onboarding.md
deleted file mode 100644
index 0116d72..0000000
--- a/docs/Onboarding.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Onboarding
-
-The latest version of this document is available at
-https://android.googlesource.com/platform/ndk/+/master/docs/Onboarding.md.
-
-Welcome to the Android NDK! This guide will explain how to get started working
-on the NDK (if you want to work *with* the NDK, see the [user
-documentation](https://github.com/android/ndk/discussions)).
-
-Googlers: you'll also want to refer to
-[go/ndk-onboarding](http://go/ndk-onboarding) which will describe any details
-not relevant to external contributors. Non-Googlers, don't worry, you're not
-missing anything interesting, just boring things like instructions for how to
-work our release tooling.
-
-[TOC]
-
-## Guides and docs
-
-1. [Architecture](Architecture.md)
-1. [Building](Building.md)
-1. [Testing](Testing.md)
-1. [Release process](https://github.com/android/ndk/wiki/NDK-Release-Process)
-1. [Downloads](https://github.com/android/ndk/wiki)
-1. [Bug tracker](https://github.com/android/ndk/issues)
-1. [Android Studio/Gradle plugin bug tracker](https://developer.android.com/studio/report-bugs)
-1. [Toolchain](Toolchains.md)
-1. [Platform APIs](PlatformApis.md)
-1. [Roadmap](Roadmap.md)
-1. [Project boards](https://github.com/android/ndk/projects)
-1. [Canary builds](ContinuousBuilds.md)
-1. [User documentation](https://developer.android.com/ndk/index.html)
-1. [Internal documentation](http://go/ndk)
-
-## Mailing lists
-
-* [android-ndk Google Group] or [GitHub
-  Discussions](https://github.com/android/ndk/discussions) for NDK discussion.
-* [android-ndk-announce Google Group] for release announcements.
-
-[android-ndk Google Group]: http://groups.google.com/group/android-ndk
-[android-ndk-announce Google Group]: http://groups.google.com/group/android-ndk-announce
-
-## Chat
-
-* [#ndk on the r/AndroidDev Discord](https://discord.gg/8rjcbDbh6e)
diff --git a/docs/PlatformApis.md b/docs/PlatformApis.md
index 2cbb935..cc781a7 100644
--- a/docs/PlatformApis.md
+++ b/docs/PlatformApis.md
@@ -131,8 +131,8 @@
 
 You wouldn't be adding a library to the NDK unless you actually wanted apps to
 be able to use your library, but in Android N or later, apps are only allowed
-to access libraries that are part of the NDK API surface. The list of libraries
-available to apps is stored in:
+to access libraries on a specific whitelist of NDK libraries. This list is
+stored in:
 - `system/core/rootdir/etc/public.libraries.android.txt` for main Android
 - `system/core/rootdir/etc/public.libraries.wear.txt` for Android Wear
 devices
@@ -195,33 +195,3 @@
 $ ./update_platform.py --no-download \
     path/to/platform/ndk-dist/ndk_platform.tar.bz2
 ```
-
-Note that most branches will include at least one codenamed release in the
-sysroot artifacts. Clang only handles integer API levels, so these will cause an
-error when updating the prebuilts if any codenames are found. To avoid such
-errors, use either `--remove-platform` or `--rename-codename` when updating.
-Whether the platform should be removed or renamed depends on the status of the
-release. For releases accompanying an Android developer preview the platform
-should be renamed, but for other releases the APIs should be removed.
-
-For example, prior to the Android R developer previews being available
-`--remove-platform R` was used. To include R API previews
-`--rename-codename R=30` was used.
-
-#### Possible problems
-If you get the error message like
-```bash
-RuntimeError: Could not rename android-something to android-xx because android-xx already exists.
-```
-when running
-```bash
-$ ./update_platform.py --no-download path/to/platform/ndk-dist/ndk_platform.tar.bz2
-```
-It it because `android-something` is in the `ndk-out/soong/ndk/platform`.
-
-Solution:
-Do a clean rebuild.
-```bash
-$ rm -rf path/to/platform/ndk-out
-$ ./update_platform.py --no-download path/to/platform/ndk-dist/ndk_platform.tar.bz2
-```
diff --git a/docs/Roadmap.md b/docs/Roadmap.md
index d609471..b659ccc 100644
--- a/docs/Roadmap.md
+++ b/docs/Roadmap.md
@@ -23,115 +23,100 @@
 Every NDK release aims to include a new toolchain, new headers, and a new
 version of libc++.
 
-We also maintain [GitHub Projects](https://github.com/android/ndk/projects)
-to track the bugs we intend to fix in any given NDK release.
+We also maintain [hotlists](https://github.com/android-ndk/ndk/milestones) 
+of the bugs we intend to fix in any given NDK release.
 
-### Toolchain updates
-
-The NDK and the Android OS use the same toolchain. Android's toolchain team is
-constantly working on updating to the latest upstream LLVM for the OS. It can
-take a long time to investigate issues when compiling -- or issues that the
-newer compiler finds in -- OS code or OEM code, for all 4 supported
-architectures, so these updates usually take a few months.
-
-Even then, a new OS toolchain may not be good enough for the NDK. In the OS, we
-can work around compiler bugs by changing our code, but for the NDK we want to
-make compiler updates cause as little disruption as possible. We also don't want
-to perform a full compiler update late in the NDK release cycle for the sake of
-stability.
-
-The aim is that each NDK will have a new toolchain that's as up to date as
-feasible without sacrificing stability, but we err on the side of stability when
-we have to make a choice. If an NDK release doesn't include a new compiler, or
-that compiler isn't as new as you'd hoped, trust us --- you wouldn't want
-anything newer that we have just yet!
-
-## Current work
-
-Most of the team's work is currently focused outside the NDK proper, so while
-the NDK release notes may seem a bit sparse, there are still plenty of
-improvements coming for NDK users:
-
-* Improving NDK and Android Gradle Plugin documentation.
-* Improving the OS (in particular the linker).
-* Working with the Android frameworks teams to get new NDK APIs.
-* Improving tooling for third-party packages via ndkports:
-  * Auto-update packages
-  * Automated testing
-  * More packages
-* Workflow improvements to decrease the costs of regular maintenance.
-
-### Apple M1
-
-https://github.com/android/ndk/issues/1299
-
-Migration of all the tools involved in an NDK build to be fat binaries will land
-over the course of a few releases. LLVM was shipped as universal binaries in
-r23b, and the rest of the tools are expected to move in r24. Further backports
-to r23 are unclear because they may risk destabilizing the release.
-
-### TSan
-
-https://github.com/android/ndk/issues/1041
-
-Port thread sanitizer for use with NDK apps, especially in unit/integration
-tests.
-
-### Testing tools
-
-Add [GTestJNI] to Jetpack to allow exposing native tests to AGP as JUnit tests.
-
-[GTestJNI]: https://github.com/danalbert/GTestJNI
-
-### More automated libc++ updates
-
-We still need to update libc++ twice: once for the platform, and once
-for the NDK. We also still have two separate test runners. We're consolidating
-all of these in one place (the toolchain) so that all LLVM updates include
-libc++ updates.
-
-### Jetpack
-
-We're working with the Jetpack team to build the infrastructure needed to start
-producing C++ Jetpack libraries. Once that's done we can start using Jetpack to
-ship helper libraries like libnativehelper, or C++ wrappers for the platform's C
-APIs. Wrappers for NDK APIs would also be able to, in some cases, backport
-support for APIs to older releases.
+---
 
 ## Future work
 
 The following projects are listed in order of their current priority.
 
 Note that some of these projects do not actually affect the contents of the NDK
-package. The samples, documentation, etc are all NDK work but are separate from
-the NDK package. As such they will not appear in any specific release, but are
-noted here to show where the team's time is being spent.
+package.  The samples, cdep, documentation, etc are all NDK work but are
+separate from the NDK package. As such they will not appear in any specific
+release, but are noted here to show where the team's time is being spent.
+
+### Easier access to common open-source libraries
+
+There are many other commonly-used libraries (such as Curl and BoringSSL)
+that are currently difficult to build/package, let alone keep updated. We
+should offer (a) a tool to build open source projects, (b) a repository
+of prebuilts, (c) a command-line tool to add prebuilts to an ndk-build/cmake
+project, and (d) Studio integration to add prebuilts via a GUI.
+
+The tools are nearly complete, and the repository is Maven for easy integration
+into existing Android projects. Access from CMake and ndk-build will be via the
+existing `find_package` and `import-module` facilities, respectively. A GUI in
+Studio will come later.
+
+For more information, see [Issue 916].
+
+[Issue 916]: https://github.com/android/ndk/issues/916
+
+### C++ File System API
+
+[Issue 609](https://github.com/android-ndk/ndk/issues/609)
+
+We don't currently build, test, or ship libc++'s std::filesystem. Until recently
+this API wasn't final, but now is at least a stable API (though it sounds like
+the ABI will change in the near future).
+
+There's a fair amount of work involved in getting these tests running, but
+that's something we should do.
+
+### CMake
+
+CMake added their own NDK support about the same time we added our
+toolchain file. The two often conflict with each other, and a toolchain
+file is a messy way to implement this support. However, fully switching to
+the integrated support puts NDK policy decisions (default options, NDK layout,
+etc) fully into the hands of CMake, which makes them impossible to update
+without the user also updating their CMake version.
+
+We will reorganize our toolchain file to match the typical implementation of a
+CMake platform integration (like `$CMAKE/Modules/Platform/Android-*.cmake`) and
+CMake will be modified to load the implementation from the NDK rather than its
+own.
+
+See [Issue 463](https://github.com/android-ndk/ndk/issues/463) for discussion.
+
+### Default to lld
+
+NDK r18 [made lld available](https://github.com/android-ndk/ndk/issues/683),
+r20 made it more usable, and a future release will make it the default once
+all the issues that turn up are resolved.
+
+### Remove gold and bfd
+
+Once we've switched the default to lld and no major issues remain,
+we should remove gold and bfd.
+
+### lldb debugger
+
+We should make lldb available in the NDK. It's currently shipped as part
+of Studio. Medium-term we should have Studio ship our lldb. Long-term Studio
+should probably use the NDK lldb directly.
 
 ---
 
 ## Unscheduled Work
 
 The following projects are things we intend to do, but have not yet been
-scheduled into the sections above.
-
-### Improve automation in ndkports so we can take on more packages
-
-Before we can take on maintenance for additional packages we need to improve the
-tooling for ndkports. Automation for package updates, testing, and the release
-process would make it possible to expand.
+scheduled into the sections above.
 
 ### Better documentation
 
 We should probably add basic doc comments to the bionic headers:
 
-* One-sentence summary.
-* One paragraph listing any Android differences. (Perhaps worth upstreaming this
-  to man7.org too.)
-* Explain any "flags" arguments (at least giving some idea of which flags)?
-* Explain the return value: what does a `char*` point to? Who owns it? Are
-  errors -1 (as for most functions) or `<errno.h>` values (for
-  `pthread_mutex_lock`)?
-* A "See also" pointing to man7.org?
+  * One-sentence summary.
+  * One paragraph listing any Android differences. (Perhaps worth
+    upstreaming this to man7.org too.)
+  * Explain any "flags" arguments (at least giving some idea of which flags)?
+  * Explain the return value: what does a `char*` point to? Who owns
+    it? Are errors -1 (as for most functions) or `<errno.h>` values (for
+    `pthread_mutex_lock`)?
+  * A "See also" pointing to man7.org?
 
 Should these be in the NDK API reference too? If so, how will we keep
 them from swamping the "real" NDK API?
@@ -140,35 +125,35 @@
 to have gained a new man page viewer that takes precedence),
 and Visual Studio Code has nothing but feature requests.
 
-Beyond writing the documentation, we also should invest some time in improving
-the presentation of the NDK API reference on developer.android.com.
-
 ### Better samples
 
 The samples are low-quality and don't necessarily cover interesting/difficult
 topics.
 
-### Better tools for improving code quality
+### Better tools for improving code quality.
 
 The NDK has long included `gtest` and clang supports various sanitiziers,
 but there are things we can do to improve the state of testing/code quality:
 
-* Test coverage support.
+ * Test coverage support.
+ * Add `gmock`.
+ * Make [GTestJNI] available to developers via some package manager so
+   developers can integrate their C++ tests into Studio.
 
-### C++ wrappers for NDK APIs
+[GTestJNI]: https://github.com/danalbert/GTestJNI
 
-NDK APIs are C-only for ABI stability reasons.
+### NDK API header-only C++ wrappers
 
-We should offer C++ wrappers as part of an NDK support library (possibly as part
-of Jetpack), even if only to offer the benefits of RAII.  Examples include
-[Bitmap](https://github.com/android-ndk/ndk/issues/822),
+NDK APIs are C-only for ABI stability reasons. We should offer header-only
+C++ wrappers for NDK APIs, even if only to offer the benefits of RAII.
+Examples include [Bitmap](https://github.com/android-ndk/ndk/issues/822),
 [ATrace](https://github.com/android-ndk/ndk/issues/821), and
 [ASharedMemory](https://github.com/android-ndk/ndk/issues/820).
 
-### JNI helpers
+### NDK C++ header-only JNI helpers
 
 Complaints about basic JNI handling are common. We should make libnativehelper
-available as an AAR.
+or something similar available to developers.
 
 ### NDK icu4c wrapper
 
@@ -176,6 +161,11 @@
 the platform. We have a C API wrapper prototype, but we need to make it
 easily available for NDK users.
 
+### More automated libc++ updates
+
+We still need to update libc++ twice: once for the platform, and once
+for the NDK. We also still have two separate test runners.
+
 ### Weak symbols for API additions
 
 iOS developers are used to using weak symbols to refer to function that
@@ -183,43 +173,19 @@
 `minSdkVersion`. We could potentially do something similar. See
 [issue 1003](https://github.com/android-ndk/ndk/issues/1003).
 
-### Make the sysroot a separately installable SDK package
+### C++ Modules
 
-The sysroot in the NDK is currently inherently a part of the NDK because it
-includes libc++ as well as some versioned artifacts like the CRT objects (with
-the ELF note identifying the NDK version that produced them) and
-`android/ndk-version.h`. Moving libc++ to the toolchain solves that coupling,
-and the others are probably tractable.
+By Q2 2019 Clang may have a complete enough implementation of the modules TS and
+Android may have a Clang with those changes available.
 
-While we'd always include the latest stable sysroot in the NDK toolchain so that
-it works out of the box, allowing the sysroot to be provided as a separate SDK
-package makes it easier for users to get new APIs without getting a new
-toolchain (via `compileSdkVersion` the same way it works for Java) and also
-easier for us to ship sysroot updates for preview API levels because they would
-no longer require a full NDK release.
+At least for the current spec (which is in the process of merging with the Clang
+implementation, so could change), the NDK will need to:
 
-### LSan
-
-Leak sanitizer has not been ported for use with Android apps but would be
-helpful to app developers in tracking down memory leaks.
-
-### Portable NDK
-
-The Linux NDK is currently dependent on the version of glibc it was built with.
-To keep the NDK compatible with as many distributions as possible we build
-against a very old version of glibc, but there are still distros that we are
-incompatible with (especially distros that use an alternative libc!). We could
-potentially solve this by statically linking all our dependencies and/or by
-switching from glibc to musl. Not all binaries can be static executables because
-they require dlopen for plugin interfaces (even if our toolchain doesn't
-currently attempt to support user-provided compiler plugins, Polly is
-distributed this way, and we may want to offer such support in the future) so
-there are still some open questions.
-
-### rr debugger
-
-https://rr-project.org/ is a C/C++ debugger that supports replay debugging. We
-should investigate what is required to support that for Android.
+ 1. Support compiling module interfaces.
+ 2. Support either automated discovery (currently very messy) or specification
+    of module dependencies.
+ 3. Begin creating module interfaces for system libraries. Frameworks, libc,
+    libc++, etc.
 
 ---
 
@@ -230,49 +196,6 @@
 
 [history]: https://developer.android.com/ndk/downloads/revision_history.html
 
-### NDK r25
-
-Significantly reduced the size of the NDK. Reverted to older CMake toolchain
-behavior to improve build reliability.
-
-### NDK r24
-
-Neon is now enabled for all armeabi-v7a libraries, improving performance for
-those apps, but dropping Tegra 2 support as a result. Removed support for
-building RenderScript, which was deprecated in Android 12. Removed obsolete GNU
-assembler and GDB. Minimum OS support raised to API 19.
-
-### NDK r23
-
-Migrated all ABIs from libgcc to the LLVM unwinder and libclang_rt. Finished
-migration to LLVM binutils from GNU binutils (with the exception of `as`, which
-remains for one more release). Integrated upstream and NDK CMake support.
-
-### NDK r22
-
-Updated toolchain and libc++. libc++ now supports `std::filesystem`. Make
-updated to 4.3. LLDB included and usable (via `--lldb`) with ndk-gdb. Replaced
-remaining GNU binutils tools with LLVM tools, deprecated GNU binutils. LLD is
-now the default.
-
-### Package management
-
-We shipped [Prefab] and the accompanying support for the Android Gradle Plugin
-to support native dependencies. AGP 4.0 includes the support for importing these
-packages, and 4.1 includes the support for creating AARs that support them.
-
-We also maintain a few packages as part of [ndkports]. Currently curl, OpenSSL,
-JsonCpp, and GoogleTest (includes GoogleMock).
-
-[Prefab]: https://github.com/google/prefab
-[ndkports]: https://android.googlesource.com/platform/tools/ndkports/
-
-### NDK r21 LTS
-
-Updated Clang, LLD, libc++, make, and GDB. Much better LLD behavior on Windows.
-32-bit Windows support removed. Neon by default for all API levels. OpenMP now
-available as both a static and shared library.
-
 ### NDK r20
 
 Updated Clang and libc++, added Q APIs. Improved out-of-the-box Clang behavior.
diff --git a/docs/Testing.md b/docs/Testing.md
index 22a9ffa..3d03b69 100644
--- a/docs/Testing.md
+++ b/docs/Testing.md
@@ -7,309 +7,100 @@
 with `run_tests.py`. See [Building.md] for more instructions on building the
 NDK.
 
+From the NDK source directory (`./ndk` within the directory you ran `repo init`
+in, or the root of the cloned directory if you cloned only the NDK project).
+
+```bash
+$ ./checkbuild.py  # Build the NDK and tests.
+$ ./run_tests.py
+```
+
+Running the tests requires `adb` in your path and compatible devices attached.
+If you're having trouble with the version from the SDK manager, try a version
+built fresh from AOSP.
+
+The test runner will look for any attached devices that match the
+requirements listed in the `devices` section of the test configuration file (see
+[qa\_config.json] for the defaults, or use `--config` to choose your own). Each
+test will be run on all devices compatible with that test.
+
+The full QA configuration takes roughly 6 minutes to run (P920 Linux host, 4
+Galaxy Nexūs for Jelly Bean, 2 Pixels for Pie, 1 emulator for x86-64 Pie).
+Attaching multiple devices will allow the test runner to shard tests among those
+devices.
+
+The tests can be rebuilt without running `checkbuild.py` (which is necessary in
+the case of not having a full NDK checkout, as you might when running the
+Windows tests on a release from the build server) with `run_tests.py --rebuild`.
+
+[qa\_config.json]: ../qa_config.json
 [Building.md]: Building.md
 
-## Prerequisites
 
-1. `adb` must be in your `PATH`.
-2. You must have compatible devices connected. See the "Devices and Emulators"
-   section.
+## Restricting Test Configurations
 
-## tl;dr
+By default, all of the configurations we test are built from both
+`checkbuild.py` and `run_tests.py --rebuild`. This runs tens of thousands of
+test executables. Each test is built in 4 different configurations (once for
+each ABI) at time of writing. The set of configurations built can be restricted
+in two ways.
 
-If you don't care how this works (if you want to know how this works, sorry, but
-you're going to have to read the whole thing) and just want to copy paste
-something that will build and run all the tests:
+First, `run_tests.py --config myconfig.json` will use an alternate test
+configuration file (the default is `qa_config.json`).
+
+Second, and simpler for a development workflow, the following flag can be used
+to restrict the configurations (the presence of any of this flag will override
+the matching entry in the config file, but otherwise the config file is obeyed):
 
 ```bash
-# In the //ndk directory of an NDK `repo init` tree.
-$ poetry shell
-$ ./checkbuild.py  # Build the NDK and tests.
-$ ./run_tests.py  # Pushes the tests to test devices and runs them.
+$ ./run_tests.py --rebuild --abi armeabi-v7a
 ```
 
-**Pay attention to the warnings.**  Running tests requires that the correct set
-of devices are available to adb. If the right devices are not available, **your
-tests will not run**.
+Configuration filtering flags are repeatable. For example, `--abi armeabi-v7a
+--abi x86` will build both armeabi-v7a and x86 tests.
 
-### Typical test cycle for fixing a bug
-
-This section describes the typical way to test and fix a bug in the NDK.
+Beyond restricting test configurations, the tests themselves can be filtered
+with the `--filter` flag:
 
 ```bash
-# All done from //ndk, starting from a clean tree.
-# 1. Update your tree.
-$ repo sync
-# 2. Create a branch for development.
-$ repo start $BRANCH_NAME_FOR_BUG_FIX .
-# 3. Make sure your python dependencies are up to date.
-$ poetry install
-# 4. Enter the poetry environment. You can alternatively prefix all python
-# commands below with `poetry run`.
-$ poetry shell
-# 5. Build the NDK and tests.
-$ ./checkbuild.py
-# 6. Run the tests to make sure everything is passing before you start changing
-# things.
-$ ./run_tests.py
-# 7. Write the regression test for the bug. The new rest of the instructions
-# will assume your new test is called "new_test".
-# 8. Build and run the new test to make sure it catches the bug. The new test
-# should fail. If it doesn't, either your test is wrong or the bug doesn't
-# exist.
-#
-# We use --rebuild here because run_tests.py does not build tests by default,
-# since that's usually a waste of time (see below). We use --filter to ignore
-# everything except our new test.
-$ ./run_tests.py --rebuild --filter new_test
-# 9. Attempt to fix the bug.
-# 10. Rebuild the affected NDK component. If you don't know which component you
-# altered, it's best to just build the whole NDK again
-# (`./checkbuild.py --no-build-tests)`. One case where you can avoid a full
-# rebuild is if the fix is contained to just ndk-build or CMake. We'll assume
-# that's the case here.
-$ ./checkbuild.py --no-build-tests ndk-build
-# 11. Re-build and run the test with the supposedly fixed NDK.
-$ ./run_tests.py --rebuild --filter new_test
-# If the test fails, return to step 9. Otherwise, continue.
-# 12. Rebuild and run *all* the tests to check that your fix didn't break
-# something else. If you only rebuilt a portion of the NDK in step 10, it's best
-# to do a full `./checkbuild.py` here as well (either use `--no-build-tests` or
-# omit `--rebuild` for `run_tests.py` to avoid rebuilding all the tests
-# *twice*).
-$ ./run_tests.py --rebuild
-# If other tests fail, return to step 9. Otherwise, continue.
-# 13. Commit and upload changes. Don't forget to `git add` the new test!
+$ ./run_tests.py --filter test-googletest-full
 ```
 
-## Types of tests
+Test filters support wildcards (as implemented by Python's `fnmatch.fnmatch`).
+The filter flag may be combined with the build configuration flags.
 
-The NDK has a few different types of tests. Each type of test belongs to its own
-"suite", and these suites are defined by the directories in `//ndk/tests`.
-
-### Build tests
-
-Build tests are the tests in [//ndk/tests/build]. These exercise the build
-systems and compilers in ways where it is not important to run the output of the
-build; all that is required for the test to pass is for the build to succeed.
-
-For example, [//ndk/tests/build/cmake-find_library] verifies that CMake's
-`find_library` is able to find libraries in the Android sysroot. If the test
-builds, the feature under test works. We could also run the executable it builds
-on the connected devices, but it wouldn't tell us anything interesting about
-that feature, so we skip that step to save time.
-
-#### Test subtypes
-
-Because the test outputs of build tests do not need to be run, build tests have
-a few subtypes that can test more flexibly than other test types. These are
-`test.py` and `build.sh` tests.
-
-One test directory can be used as more than one type of test. This is quite
-common when a behavior should be tested in both CMake and ndk-build.
-
-The test types in a directory are determined as follows (in order of
-precedence):
-
-1. If there is a `build.sh` file in the directory, it is a `build.sh` test. No
-   other test types will be considered.
-2. If there is a `test.py` file in the directory, it is a `test.py` test. No
-   other test types will be considered.
-3. If there are files matching `jni/*.mk` in the directory, it is an ndk-build
-   test. These tests may co-exist with CMake tests.
-4. If there is a `CMakeLists.txt file in the directory, it is a CMake test.
-   These tests may co-exist with ndk-build tests.
-
-[//ndk/tests/build]: ../tests/build
-[//ndk/tests/build/cmake-find_library]: ../tests/build/cmake-find_library
-
-##### ndk-build
-
-An ndk-build test will treat the directory as an ndk-build project.
-`ndk-build` will build the project for each configuration.
-
-##### CMake
-
-A CMake test will treat the directory as a CMake project. CMake will configure
-and build the project for each configuration.
-
-##### test.py
-
-A `test.py` build test allows the test to customize its execution and results.
-It does this by delegating those details to the `test.py` script in the test
-directory. Any (direct) subdirectory of `//ndk/tests/build` that contains a
-`test.py` file will be executed as this type of test.
-
-**These types of tests are rarely needed.** Unless you need to inspect the
-output of the build, need to build in a very non-standard way, or need to test
-a behavior outside CMake or ndk-build, you probably do not want this type of
-test.
-
-For example, [//ndk/tests/build/NDK_ANALYZE] builds an ndk-build project that
-emits clang static analyzer warnings that the test then checks for.
-
-For some commonly reused `test.py` patterns, there are helpers in [ndk.testing]
-that will simplify writing these forms of tests. Verifying that the build system
-passes a specific flag to the compiler when building is a common pattern, such
-as in [//ndk/tests/build/branch-protection].
-
-[//ndk/tests/build/NDK_ANALYZE]: ../tests/build/NDK_ANALYZE
-[ndk.testing]: ../ndk/testing
-[//ndk/tests/build/branch-protection]: ../tests/build/branch-protection
-
-##### build.sh
-
-A `build.sh` test is similar to a `test.py` test, but with a worse feature set
-in a worse language, and also can't be tested on Windows. Do not write new
-`build.sh` tests. If you need to modify an existing `build.sh` test, consider
-migrating it to `test.py` first.
-
-#### Negative build tests
-
-Most build tests cannot easily check negative test cases, since they typically
-are only verified by the exit status of the build process (`build.sh` and
-`test.py` tests can of course do better). To make a negative test for an
-ndk-build or CMake build test, use the `is_negative_test` `test_config.py`
-option:
-
-```python
-def is_negative_test() -> bool:
-    return True
-```
-
-#### Passing additional command line arguments to build systems
-
-For tests that need to pass specific command line arguments to the build system,
-use the `extra_cmake_flags` and `extra_ndk_build_flags` `test_config.py`
-options:
-
-```python
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_STL=system"]
-
-
-def extra_ndk_build_flags() -> list[str]:
-    return ["NDK_GRADLE_INJECTED_IMPORT_PATH=foo"]
-```
-
-### Device tests
-
-Device tests are the tests in [//ndk/tests/device]. Device tests inherit most of
-their behavior from build tests. It differs from build tests in that the
-executables that are in the build output will be run on compatible attached
-devices (see "Devices and Emulators" further down the page).
-
-These test will be built in the same way as build tests are, although `build.sh`
-and `test.py` tests are not valid for device tests. Each executable in the
-output directory of the build will be treated as a single test case. The
-executables and shared libraries in the output directory will all be pushed to
-compatible devices and run.
-
-[//ndk/tests/build]: ../tests/device
-
-### libc++ tests
-
-libc++ tests are the tests in [//ndk/tests/libc++]. These are a special case of
-device test that are built by LIT (LLVM's test runner) rather than ndk-build or
-CMake, and the test sources are in the libc++ source tree.
-
-As with device tests, executables and shared libraries in the output directory
-will be pushed to the device to be run. The directory structure differs from our
-device tests though because some libc++ tests are sensitive to that. Some tests
-also contain test data that will be pushed alongside the binaries.
-
-You will never write one of these tests in the NDK. If you need to add a test to
-libc++, do it in the upstream LLVM repository. You probably do not need to
-continue reading this section unless you are debugging libc++ test failures or
-test runner behavior.
-
-There is only one "test" in the libc++ test directory. This is not a real test,
-it is just a convenience for the test scanner. The test builder will invoke LIT
-on the libc++ test directory, which will build all the libc++ tests to the test
-output directory. This will emit an xunit report that the test builder parses
-and converts into new "tests" that do nothing but report the result from xunit.
-This is a hack that makes the test results more readable.
-
-[//ndk/tests/build]: ../tests/libc++
-
-### ndk-stack
-
-These are typic Python tests that use the `unittest` library to exercise
-ndk-stack. Unlike all the other tests in the NDK, these are not checked by
-`checkbuild.py` or `run_tests.py`. To run these tests, run:
+Putting this all together, a single test can be rebuilt and run for just
+armeabi-v7a, with the following command:
 
 ```bash
-poetry run pytest tests/ndk-stack/*.py
+$ ./run_tests.py --rebuild \
+    --abi armeabi-v7a \
+    --filter test-googletest-full
 ```
 
-## Controlling test build and execution
-
-### Re-building tests
-
-**The tests will not be rebuilt unless you use `--rebuild`.** `run_tests.py`
-will not _build_ tests unless it is specifically requested because doing so is
-expensive. If you've changed something and need to rebuild the test, use
-`--rebuild` as well as `--filter`.
-
-### Running a subset of tests
-
-To re-check a single test during development, use the `--filter` option of
-`run_tests.py`. For example, `poetry run ./run_tests.py --filter math` will re-
-run the math tests.
-
-To run more than one test, the `--filter` argument does support shell-like
-globbing. `--filter "emutls-*"` will re-run the tests that match the pattern
-`emultls-*`, for example.
-
-Keep in mind that `run_tests.py` will not rebuild tests by default. If you're
-iterating on a single test, you probably need the `--rebuild` flag described
-above to rebuild the test after any changes.
-
-### Restricting test configurations
-
-By default, every variant of the test will be run (and, if using `--rebuild`,
-built). Some test matrix dimensions can be limited to speed up debug iteration.
-If you only need to debug 64-bit Arm, for example, pass `--abi arm64-v8a` to
-`run_tests.py`.
-
-The easiest way to prevent tests from running on API levels you don't want to
-re-check is to just unplug those devices. Alternatively, you can modify
-`qa_config.json` to remove those API levels.
-
-Other test matrix dimensions (such as build system or CMake toolchain file
-variant) cannot currently be filtered.
-
-### Showing all test results
-
-By default `run_tests.py` will only show failing tests. Failing means either
-tests that are expected to pass but failed, or were expected to fail but passed.
-Tests that pass, were skipped due to an invalid configuration, or failed but
-have been marked as a known failure will not be shown unless the `--show-all`
-flag is used. This is helpful for checking that your test really did run rather
-than being skipped, or to verify that your `test_config.py` is correctly
-identifying a known failure.
 
 ## Testing Releases
 
 When testing a release candidate, your first choice should be to run the test
 artifacts built on the build server for the given build. This is the
-ndk-tests.tar.bz2 artifact in the same directory as the NDK zip. Extract the
+ndk-tests.tar.bz2 artifact in the same directory as the NDK tarball. Extract the
 tests somewhere, and then run:
 
 ```bash
-$ ./run_tests.py --clean-device path/to/extracted/tests
+$ ./run_tests.py path/to/extracted/tests
 ```
 
-`--clean-device` is necessary to ensure that the new tests do get pushed to the
-device even if the timestamps on the tests are older than what's currently
-there. If you need to re-run those tests (say, to debug a failing test), you
-will want to omit `--clean-device` for each subsequent run of the same test
-package or each test run will take a very long time.
+For Windows, test artifacts are not available since we cross compile the NDK
+from Linux rather than building on Windows. We want to make sure the Windows
+binaries we build work *on* Windows (using wine would only tell us that they
+work on wine, which may not be bug compatible with Windows), so those must be
+built on the test machine before they can be run. To use the fetched NDK to
+build the tests, run:
 
-The ndk-tests.tar.bz2 artifact will exist for each of the "linux", "darwin_mac",
-and "win64_tests" targets. All of them must be downloaded and run. Running only
-the tests from the linux build will not verify that the windows or darwin NDKs
-produces usable binaries.
+```bash
+$ ./run_tests.py --rebuild --ndk path/to/extracted/ndk out
+```
+
 
 ## Broken and Unsupported Tests
 
@@ -317,90 +108,62 @@
 add a `test_config.py` to the test's root directory (in the same directory as
 `jni/`).
 
-Unsupported tests will not be built or run. They will show as "SKIPPED" if you
-use `--show-all`. Tests should be marked unsupported for configurations that do
-not work **when failure is not a bug**. For example, yasm is an x86 only
-assembler, so the yasm tests are unsupported for non-x86 ABIs.
+Unsupported tests will not be built or run.
 
 Broken tests will be built and run, and the result of the test will be inverted.
 A test that fails will become an "EXPECTED FAILURE" and not be counted as a
 failure, whereas a passing test will become an "UNEXPECTED SUCCESS" and count as
-a failure. Tests should be marked broken **when they are known to fail and that
-failure is a bug to be fixed**. For example, at the time of writing, ASan
-doesn't work on API 21. It's supposed to, so this is a known bug.
+a failure.
 
 By default, `run_tests.py` will hide expected failures from the output since the
-caller is most likely only interested in seeing what effect their change had. To
+user is most likely only interested in seeing what effect their change had. To
 see the list of expected failures, pass `--show-all`.
 
-"Broken" and "unsupported" come in both "build" and "run" variants. This allows
-better fidelity for describing a test that is known to fail at runtime, but
-should build correctly. Such a test would use `run_broken` rather than
-`build_broken`.
-
-Here's an example `test_config.py` that marks the tests in the same directory as
-broken when building for arm64 and unsupported when running on a pre-Lollipop
-device:
+Here's an example `test_config.py` that marks this test as broken when building
+for arm64 and unsupported when running on a pre-Lollipop device:
 
 ```python
-from typing import Optional
-
-from ndk.test.devices import Device
-from ndk.test.types import Test
-
-
-def build_broken(test: Test) -> tuple[Optional[str], Optional[str]]:
-    if test.abi == 'arm64-v8a':
-        return test.abi, 'https://github.com/android-ndk/ndk/issues/foo'
+def build_broken(abi, platform):
+    if abi == 'arm64-v8a':
+        return abi, 'https://github.com/android-ndk/ndk/issues/foo'
     return None, None
 
 
-def run_unsupported(test: Test, device: Device) -> Optional[str]:
-    if device.version < 21:
-        return f'{device.version}'
+def run_unsupported(abi, device_api, name):
+    if device_api < 21:
+        return device_api
     return None
 ```
 
 The `*_broken` checks return a tuple of `(broken_configuration, bug_url)` if the
-given configuration is known to be broken, else `(None, None)`. All known
-failures must have a (public!) bug filed. If there is no bug tracking the
-failure yet, file one on GitHub.
+given configuration is known to be broken, else `(None, None)`.
 
 The `*_unsupported` checks return `broken_configuration` if the given
 configuration is unsupported, else `None`.
 
-The configuration is available in the `Test` and `Device` objects which are
-arguments to each function. Check the definition of each class to find which
-properties can be used, but the most commonly used are:
+The configuration is specified by the following arguments:
 
-* `test.abi`: The ABI being built for.
-* `test.api`: The platform version being *built* for. Not necessarily the
+* `abi`: The ABI being built for.
+* `platform`: The platform version being *built* for. Not necessarily the
   platform version that the test will be run on.
-* `device.version`: The API level of the device the test will be run on.
-* `test.name`: The full name of the test, as would be reported by the test
-  runner. For example, the `fuzz_test` executable built by `tests/device/fuzzer`
-  is named `fuzzer.fuzz_test`. Build tests should never need to use this
-  property, as there is only one test per directory. libc++ tests will most
-  likely prefer `test.case_name` (see below).
-* `test.case_name`: The shortened name of the test case. This property only
-  exists for device tests (for `run_unsupported` and `run_broken`). This
-  property will not exactly match the name of the executable. If the executable
-  is named `foo.pass.cpp.exe`, but `test.case_name` will be `foo.pass`.
+* `device_api`: The API level of the device the test will be run on.
+* `name`: This is the name of the test executable being run. For libc++ tests
+  built by LIT, the executable will be `foo.pass.cpp.exe`, but `name` will be
+  `foo.pass`.
+
 
 ## Devices and Emulators
 
 For testing a release, make sure you're testing against the released user builds
 of Android.
 
-For Nexus/Pixel devices, use https://source.android.com/docs/setup/build/flash
-(Googlers, use http://go/flash). Factory images are also available here:
+For Nexus/Pixel devices, factory images are available here:
 https://developers.google.com/android/nexus/images.
 
 For emulators, use emulator images from the SDK rather than from a platform
-build, as these are what our users will be using. Note that some NDK tests
-(namely test-googletest-full and asan-smoke) are known to break between emulator
-updates. It is not known whether these are NDK bugs, emulator bugs, or x86_64
-system image bugs. Just be aware of them, and update the test config if needed.
+build, as these are what our users will be using. Note that the emulators are
+known to break some NDK tests from update to update (namely test-googletest-full
+and asan-smoke).
 
 After installing the emulator images from the SDK manager, they can be
 configured and launched for testing with (assuming the SDK tools directory is in
@@ -408,100 +171,26 @@
 
 ```bash
 $ android create avd --name $NAME --target android-$LEVEL --abi $ABI
-$ emulator -avd $NAME
+$ emulator -avd $NAME -no-window
 ```
 
-This will create and launch a new virtual device.
+This will create a new virtual device and launch it in a headless state. Note
+that SIGINT will not stop the emulator, and SIGTERM might leave it in a broken
+state. To shut down an emulator, use `adb shell reboot -p`.
 
-Whether physical devices or emulators will be more useful depends on your host
-OS.
+Note that there are no ARM64 emulators whatsoever in the SDK manager. Testing
+ARM64 will require a physical device.
 
-For an x86_64 host, physical devices for the Arm ABIs will be much faster than
-emulation. x86/x86_64 emulators will be virtualized on x86_64 hosts, which are
-very fast.
-
-For M1 Macs, it is very difficult to test x86/x86_64, as devices with those ABIs
-are very rare, and the emulators for M1 Macs are also Arm. For this reason, it's
-easiest to use an x86_64 host for testing x86/x86_64 device behavior.
-
-### Device selection
-
-`run_tests.py` will only consider devices that match the configurations
-specified by `qa_config.json` when running tests. We do not test against every
-supported version of the OS (as much as I'd like to, my desk isn't big enough
-for that many phones), but only the subset specified in that file.
-
-Any connected devices that do not match the configurations specified by
-`qa_config.json` will be ignored. Devices that match the tested configs will be
-pooled to allow sharding.
-
-Each test will be run on every device that it is compatible with. For example,
-a test that was built for armeabi-v7a with a minSdkVersion of 21 will run on all
-device pools that support that ABI with an OS API level of 21 or newer (unless
-otherwise disabled by `run_unsupported`).
-
-**Read the warnings printed at the top of `run_tests.py` output to figure out
-what device configurations your test pools are missing.** If any warnings are
-printed, the configuration named in the warning **will not be tested**. This is
-a warning rather than an error because it is very common to not have all
-configurations available (as mentioned above, it's not viable for M1 Macs to
-check x86 or x86_64). If you cannot test every configuration, be aware of what
-configurations your changes are likely to break and make sure those are at least
-tested. When testing a release, make sure that all configurations have been
-tested before shipping.
-
-`qa_config.json` has the following format:
-
-```json
-{
-  "devices": {
-    "21": [
-      "armeabi-v7a",
-      "arm64-v8a"
-    ],
-    "32": [
-      "armeabi-v7a",
-      "arm64-v8a",
-      "x86_64"
-    ]
-  }
-}
-```
-
-The `devices` section specifies which types of devices should be used for
-running tests. Each key defines the OS API level that should be tested, and the
-value is a list of ABIs that should be checked for that OS version. In the
-example above, tests will be run on each of the following device configurations:
-
-* API 21 armeabi-v7a
-* API 21 arm64-v8a
-* API 32 armeabi-v7a
-* API 32 arm64-v8a
-* API 32 x86_64
-
-The format also supports the infrequently used `abis` and `suites` keys. **You
-probably do not need to read this paragraph.** Each has a list of strings as the
-value. Both can be used to restrict the build configurations of the tests.
-`abis` selects which ABIs to build. This property will be overridden by `--abis`
-if that argument is used, and will default to all ABIs if neither are present,
-which is the normal case. `suites` selects which test suites to build. Valid
-entries in this list are the directory names within `tests`, with the exception
-of `ndk-stack`. In other words (at the time of writing), `build`, `device`, and
-`libc++` are valid items.
 
 ## Windows VMs
 
-Warning: the process below hasn't been tested in a very long time. Googlers
-should refer to http://go/ndk-windows-vm for slightly more up-to-date Google-
-specific setup instructions, but http://go/windows-cloudtop may be easier.
-
 Windows testing can be done on Windows VMs in Google Compute Engine. To create
 one:
 
-* Install the [Google Cloud SDK](https://cloud.google.com/sdk/).
-* Run `scripts/create_windows_instance.py $PROJECT_NAME $INSTANCE_NAME`
-    * The project name is the name of the project you configured for the VMs.
-    * The instance name is whatever name you want to use for the VM.
+ * Install the [Google Cloud SDK](https://cloud.google.com/sdk/).
+ * Run `scripts/create_windows_instance.py $PROJECT_NAME $INSTANCE_NAME`
+   * The project name is the name of the project you configured for the VMs.
+   * The instance name is whatever name you want to use for the VM.
 
 This process will create a `secrets.py` file in the NDK project directory that
 contains the connection information.
diff --git a/docs/Toolchains.md b/docs/Toolchains.md
index 0bae4f6..1b5080d 100644
--- a/docs/Toolchains.md
+++ b/docs/Toolchains.md
@@ -3,17 +3,16 @@
 The latest version of this document is available at
 https://android.googlesource.com/platform/ndk/+/master/docs/Toolchains.md.
 
-The LLVM toolchain shipped in the NDK is not built as a part of the NDK build
-process. Instead is it built separately and checked into git as a prebuilt that
-is repackaged when shipped in the NDK.
+The toolchains shipped in the NDK are not built as a part of the NDK build
+process. Instead they are built separately and checked into git as prebuilts
+that are repackaged when shipped in the NDK. This applies to both Clang and
+binutils.
 
-An artifact of the toolchain build is the distribution tarball. That artifact is
-unpacked into a location in the Android tree and checked in. The NDK build step
-for the toolchain copies that directory into the NDK and makes some
-modifications to make the toolchains suit the NDK rather than the platform.
-For example, non-NDK runtime libraries are deleted and the NDK sysroot is
-installed to the `sysroot` subdirectory of the toolchain so it can be found
-automatically by the compiler.
+Both toolchains are built separately. An artifact of the build is a tarball of
+the compiler for distribution. That artifact is unpacked into a location in the
+Android tree and checked in. The NDK build step for the toolchain copies that
+directory into the NDK and makes minor modifications to make the toolchains suit
+the NDK rather than the platform.
 
 Note: Any changes to either toolchain need to be tested in the platform *and*
 the NDK. The platform and the NDK both get their toolchains from the same build.
@@ -25,48 +24,25 @@
 
 Clang's build process is described in the [Android LLVM Readme]. Note that Clang
 cannot be built from the NDK tree. The output tarball is extracted to
-`prebuilts/clang/host/$HOST/clang-$REVISION`. `checkbuild.py toolchain`
-repackages this into the NDK out directory.
+`prebuilts/clang/host/$HOST/clang-$REVISION`. `checkbuild.py clang` repackages
+this into the NDK out directory.
 
-[Android LLVM Readme]: https://android.googlesource.com/toolchain/llvm_android/+/master/README.md
+[Android LLVM Readme]: https://android.googlesource.com/toolchain/llvm_android/+/master/README.md
 
-### Updating to a New Clang
+### Testing Local Changes
 
-If you're updating the NDK to use a new release of the LLVM toolchain, do the
-following.
-
-Note: These steps need to be run after installing the new prebuilt from the
-build server to `prebuilts/clang` (see the [update-prebuilts.py]). The LLVM team
-will handle installing the new toolchain to prebuilts, but the NDK team usually
-makes the change to migrate to the new toolchain as described below.
-
-[update-prebuilts.py]: https://android.googlesource.com/toolchain/llvm_android/+/master/update-prebuilts.py
-
-```bash
-# Edit ndk/toolchains.py and update `CLANG_VERSION`.
-$ ./checkbuild.py
-# ./run_tests.py
-```
-
-### Testing local llvm-toolchain changes with the NDK
-
-If you're working with unsubmitted changes to llvm-toolchain and want to test
-your LLVM changes in the NDK, do the following. If you're just updating the NDK
-to use a newer prebuilt LLVM, you don't need to do this part.
+To test a Clang you just built:
 
 ```bash
 $ export CLANG_PREBUILTS=`realpath ../prebuilts/clang/host/linux-x86`
 $ rm -r $CLANG_PREBUILTS/clang-dev
-# $LLVM_TOOLCHAIN refers to the root of your llvm-toolchain source directory. If
-# you have a tarball for the toolchain distribution, extract that to
-# $CLANG_PREBUILTS/clang-dev instead.
-$ cp -r $LLVM_TOOLCHAIN/out/install/$HOST/clang-dev $CLANG_PREBUILTS/
-# Update CLANG_VERSION in ndk/toolchains.py to clang-dev.
+$ tar xf path/to/clang-dev-linux-x86_64.tar.bz2 -C $CLANG_PREBUILTS
+# Edit ndk/toolchains.py and change the version to 'clang-dev'.
+# Update the VERSION variable in get_llvm_toolchain_binprefix in
+# build/tools/prebuilt-common.sh.
+$ python3 ../prebuilts/ndk/symlink-clang.py dev
 $ ./checkbuild.py
-# Run tests. To run the NDK test suite, you will need to attach the
-# appropriately configured devices. The test tool will print warnings for
-# missing configurations.
-$ ./run_tests.py
+# Run tests.
 ```
 
 For details about running tests, see [Testing.md].
@@ -74,7 +50,10 @@
 [Testing.md]: Testing.md
 
 This installs the new Clang into the prebuilts directory so it can be included
-in the NDK.
+in the NDK. The `symlink-clang.py` line updates the symlinks in `prebuilts/ndk` to
+point at the new Clang. The Clang in `prebuilts/ndk` is used by legacy NDK build
+scripts in ndk/build/tools. The difference between it and `prebuilts/clang` is
+the directory layout, which differs so that `ndk-build` can use it.
 
 If you need to make changes to Clang after running the above steps, future
 updates can be done more quickly with:
@@ -82,8 +61,64 @@
 ```bash
 $ rm -r $CLANG_PREBUILTS/clang-dev
 $ tar xf path/to/clang-dev-linux-x86_64.bz2 -C $CLANG_PREBUILTS
-$ ./checkbuild.py toolchain
+$ ./checkbuild.py toolchain --force-package
 # Run tests.
 ```
 
 We don't need to rebuild the whole NDK since we've already built most of it.
+
+### Updating to a New Clang
+
+These steps need to be run after installing the new prebuilt from the build
+server to `prebuilts/clang` (see the [update-prebuilts.py]).
+
+[update-prebuilts.py]: https://android.googlesource.com/toolchain/llvm_android/+/master/update-prebuilts.py
+
+```bash
+# Edit ndk/toolchains.py and update `CLANG_VERSION`.
+# Update the VERSION variable in get_llvm_toolchain_binprefix in
+# build/tools/prebuilt-common.sh.
+$ ../prebuilts/ndk/symlink-clang.py # Latest version autodetected.
+$ ./checkbuild.py # `--module clang` to build just Clang.
+# Run tests.
+```
+
+## Binutils
+
+Binutils is built using the [build.py] script in the toolchain/binutils.
+
+Unlike Clang, binutils can be built from the NDK tree. The output tarball is
+extracted to `prebuilts/ndk/binutils/$HOST/binutils-$ARCH-$HOST`. Like Clang,
+this is built with `checkbuild.py toolchain`.
+
+[build.py]: https://android.googlesource.com/toolchain/binutils/+/master/build.py
+
+### Testing Local Changes
+
+To test binutils you just built:
+
+```bash
+$ export INSTALL_DIR=`realpath ../prebuilts/ndk/binutils/$HOST`
+$ rm -r $INSTALL_DIR/binutils-$ARCH-$HOST
+$ unzip ../out/dist/binutils-$ARCH-$HOST.zip -d $INSTALL_DIR
+$ ./checkbuild.py
+# Run tests.
+```
+
+For details about running tests, see [Testing.md].
+
+Since the NDK is already built, additional changes will not require a full
+`checkbuild.py`. Instead:
+
+```bash
+$ ./checkbuild.py toolchain
+# Run tests.
+```
+
+### Updating to a New Binutils
+
+```bash
+$ ../prebuilts/ndk/update_binutils.py $BUILD_NUMBER
+$ ./checkbuild.py
+# Run tests.
+```
diff --git a/docs/changelogs/Changelog-r19.md b/docs/changelogs/Changelog-r19.md
index c01e4a9..5af0efa 100644
--- a/docs/changelogs/Changelog-r19.md
+++ b/docs/changelogs/Changelog-r19.md
@@ -116,7 +116,7 @@
      * If you need to continue supporting these devices, add
        `-Wl,--fix-cortex-a8` to your `APP_LDFLAGS` or `CMAKE_C_FLAGS`, but note
        that LLD will not be adding support for this workaround.
-     * Alternatively, use the Play Console to [exclude] Cortex-A8 CPUs to
+     * Alternatively, use the Play Console to [blacklist] Cortex-A8 CPUs to
        disallow your app from being installed on those devices.
  * [Issue 798]: The ndk-build and CMake options to disable RelRO and noexecstack
    are now ignored. All code is built with RelRO and non-executable stacks.
@@ -126,7 +126,7 @@
 [Issue 294]: https://github.com/android-ndk/ndk/issues/294
 [Issue 776]: https://github.com/android-ndk/ndk/issues/776
 [Issue 798]: https://github.com/android-ndk/ndk/issues/798
-[exclude]: https://support.google.com/googleplay/android-developer/answer/7353455?hl=en
+[blacklist]: https://support.google.com/googleplay/android-developer/answer/7353455?hl=en
 [compiler-rt]: https://compiler-rt.llvm.org/
 
 Known Issues
diff --git a/docs/changelogs/Changelog-r21.md b/docs/changelogs/Changelog-r21.md
index 73dda09..1b349cc 100644
--- a/docs/changelogs/Changelog-r21.md
+++ b/docs/changelogs/Changelog-r21.md
@@ -18,105 +18,16 @@
 
 [blog post]: https://android-developers.googleblog.com/2019/06/moving-android-studio-and-android.html
 
- * macOS 10.8 is no longer supported as of r21b (r21 supports 10.8). macOS 10.15
-   requires that binaries be notarized, and notarization is only supported for
-   binaries built for 10.9 or newer.
-
  * [LLD](https://lld.llvm.org/) is now available for testing. AOSP has switched
    to using LLD by default and the NDK will follow (timeline unknown). Test LLD
    in your app by passing `-fuse-ld=lld` when linking. Note that [Issue 843]
    will affect builds using LLD with binutils strip and objcopy as opposed to
    llvm-strip and llvm-objcopy.
 
- * The legacy toolchain install paths will be removed over the coming releases.
-   These paths have been obsolete since NDK r19 and take up a considerable
-   amount of space in the NDK. The paths being removed are:
-
-   * platforms
-   * sources/cxx-stl
-   * sysroot
-   * toolchains (with the exception of toolchains/llvm)
-
-   In general this change should only affect build system maintainers, or those
-   using build systems that are not up to date. ndk-build and the CMake
-   toolchain users are unaffected, and neither are
-   `make_standalone_toolchain.py` users (though that script has been unnecessary
-   since r19).
-
-   For information on migrating away from the legacy toolchain layout, see the
-   [Build System Maintainers Guide] for the NDK version you're using.
-
  * The Play Store will require 64-bit support when uploading an APK beginning in
    August 2019. Start porting now to avoid surprises when the time comes. For
    more information, see [this blog post](https://android-developers.googleblog.com/2017/12/improving-app-security-and-performance.html).
 
-[Build System Maintainers Guide]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
-
-## r21e
-
- * [Issue 147772940]: Passing `APP_BUILD_SCRIPT` to ndk-build with a file named
-   something other than Android.mk works again.
-
-[Issue 147772940]: https://issuetracker.google.com/147772940
-
-## r21c
-
- * [Issue 1060]: A macOS app bundle that is signed and notarized is now
-   available for download from our wiki and our website. Note that because only
-   bundles may use RPATHs and pass notarization, the traditional NDK package for
-   macOS **cannot* be notarized.  The SDK will continue to use the traditional
-   package as the app bundle requires layout changes that would make it
-   incompatible with Android Studio.  The NDK is not quarantined when it is
-   downloaded via the SDK manager, so is curently allowed by Gatekeeper.
-
-   **The SDK manager is currently the most reliable way to get the NDK for
-   macOS.**
-
- * [Issue 1207]: Fix fatal error in clang when building with -O2 on arm64.
-
- * [Issue 1239]: Fix network drive issues for clang.
-
- * [Issue 1229]: README.md turned back to ordinary file.
-
-[Issue 1060]: https://github.com/android/ndk/issues/1060
-[Issue 1207]: https://github.com/android/ndk/issues/1207
-[Issue 1239]: https://github.com/android/ndk/issues/1239
-[Issue 1229]: https://github.com/android/ndk/issues/1229
-
-## r21b
-
- * Fixed debugging processes containing Java with gdb. Cherrypicked
-   "gdb: Don't fault for 'maint print psymbols' when using an index", which
-   fixes a bug that caused gdb to fail when debugging a process with Java.
-   Pure C/C++ executables were fine, but this effectively broke all app
-   debugging. The error from gdb that confirms you were affected by this was
-   `gdb-8.3/gdb/psymtab.c:316: internal-error: sect_index_text not initialized`.
- * [Issue 1166]: Rehid unwinder symbols all architectures.
- * [Issue 1173]: Fix gdb python symbol missing issue on Darwin.
- * [Issue 1176]: Fix strip failing with "File truncated" errors on Windows.
- * [Issue 1178]: Revert changes to stdatomic.h to maintain compatibility with
-   C's `_Atomic` type qualifier. Note that the C++ standard will likely mandate
-   this breakage in the future. See [P0943R4] for more details.
- * [Issue 1184]: Fix Clang crash for x86_64.
- * [Issue 1198]: Fix incorrect constant folding of long doubles on Windows.
- * [Issue 1201]: Fixed issue in ndk-build that was causing `APP_PLATFORM` to be
-   corrupted for API 30+ with LP64 ABIs.
- * [Issue 1203]: libc++ prebuilts and CRT objects are no longer built as Neon.
- * [Issue 1205]: Potential fixes for relocation out of range issues with LLD.
- * [Issue 1206]: LLD support for --fix-cortex-a8.
-
-[Issue 1166]: https://github.com/android/ndk/issues/1166
-[Issue 1173]: https://github.com/android/ndk/issues/1173
-[Issue 1176]: https://github.com/android/ndk/issues/1176
-[Issue 1178]: https://github.com/android/ndk/issues/1178
-[Issue 1184]: https://github.com/android/ndk/issues/1184
-[Issue 1198]: https://github.com/android/ndk/issues/1198
-[Issue 1201]: https://github.com/android/ndk/issues/1201
-[Issue 1203]: https://github.com/android/ndk/issues/1203
-[Issue 1205]: https://github.com/android/ndk/issues/1205
-[Issue 1206]: https://github.com/android/ndk/issues/1206
-[P0943R4]: http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2019/p0943r4.html
-
 ## Changes
 
  * Updated Clang and LLD to r365631.
@@ -131,7 +42,7 @@
          * If you need to continue supporting these devices you can disable
            Neon explicitly by setting `LOCAL_ARM_NEON := false` in ndk-build or
            passing `-DANDROID_ARM_NEON=false` to CMake.
-         * Alternatively, use the Play Console to [exclude CPUs] without
+         * Alternatively, use the Play Console to [blacklist CPUs] without
            Neon to disallow your app from being installed on those devices.
      * [Issue 1004]: Fixed bug with bad line number information when building
        Arm64 with `-O0`.
@@ -143,14 +54,10 @@
  * Updated make to 4.2.1.
      * Modified ndk-build to supply `-O` for more readable errors with parallel
        builds.
-     * `abspath` now works properly with Windows drive letters, so if you're
-       using the [workaround] similar to what's found in the NDK samples, you'll
-       need to either drop the workaround or make sure it's only used prior to
-       r21.
  * Updated glibc to 2.17.
  * Updated gdb to 8.3.
  * [Issue 885]: For LLD+LLDB compatibility, the NDK build systems now pass
-   `-Wl,--build-id=sha1` instead of `-Wl,--build-id` when using LLD. Note that
+   `-Wl,--build-id=tree` instead of `-Wl,--build-id` when using LLD. Note that
    the CMake toolchain does not have access to flags set in CMakeLists.txt, so
    using an explicit `-fuse-ld=lld` instead of `ANDROID_LD=lld` will produce
    output that cannot be debugged with Android Studio. Third-party build systems
@@ -177,9 +84,6 @@
    or newer:
 
    ```makefile
-   # This check works even on pre-r21 NDKs. The function is undefined pre-r21,
-   # and calling an undefined function in make returns the empty string, which
-   # is not equal to "true", so the else branch will be taken.
    ifeq ($(call ndk-major-at-least,21),true)
        # Using at least NDK r21.
    else
@@ -188,7 +92,10 @@
    ```
 
    Note that because this API was not available before r21, it cannot be used to
-   determine *which* NDK version earlier than 21 is being used.
+   determine *which* NDK version earlier than 21 is being used, so this API is
+   of limited use today. Also note that the above code will behave correctly
+   even on pre-r21 because calling an undefined function in make returns the
+   empty string, so the else case will be taken.
  * [Issue 1092]: Fixed hiding of unwinder symbols in outputs of ndk-build and
    CMake. Maintainers of third-party build systems should apply similar fixes
    when using NDK r19 and above to guard against possible future compatibility
@@ -205,9 +112,8 @@
 [Issue 885]: https://github.com/android-ndk/ndk/issues/885
 [Issue 916]: https://github.com/android-ndk/ndk/issues/916
 [Issue 976]: https://github.com/android/ndk/issues/976
-[exclude CPUs]: https://support.google.com/googleplay/android-developer/answer/7353455?hl=en
+[blacklist CPUs]: https://support.google.com/googleplay/android-developer/answer/7353455?hl=en
 [maintainer_linkers]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md#Linkers
-[workaround]: https://github.com/android/ndk-samples/blob/2c97a9eb5b9b5de233b7ece4dd0d0d28fa4cb4c2/other-builds/ndkbuild/common.mk#L26
 
 ## Known Issues
 
diff --git a/docs/changelogs/Changelog-r22.md b/docs/changelogs/Changelog-r22.md
deleted file mode 100644
index 9295feb..0000000
--- a/docs/changelogs/Changelog-r22.md
+++ /dev/null
@@ -1,178 +0,0 @@
-# Changelog
-
-Report issues to [GitHub].
-
-For Android Studio issues, follow the docs on the [Android Studio site].
-
-[GitHub]: https://github.com/android/ndk/issues
-[Android Studio site]: http://tools.android.com/filing-bugs
-
-## Announcements
-
-* GNU binutils is deprecated and will be removed in an upcoming NDK release.
-  Note that the GNU assembler (`as`) **is** a part of this. If you are building
-  with `-fno-integrated-as`, file bugs if anything is preventing you from
-  removing that flag. If you're using `as` directly, use `clang` instead.
-
-* [LLD](https://lld.llvm.org/) is now the default linker. ndk-build and our
-  CMake toolchain file have also migrated to using llvm-ar and llvm-strip.
-
-  See the Changes section below for more information.
-
-## r22b
-
-* [Issue 1377]: Fix Clang backend crash in register scavenging.
-* [Issue 1388]: Fixed LLVM's incorrect conversion to list files for long command
-  lines on Windows.
-* [Issue 1391]: Fixed missing symbols from libz.a.
-* [Issue 1427]: Fixed Clang auto-detection for CMake 3.19 and older for Windows.
-
-[Issue 1377]: https://github.com/android/ndk/issues/1377
-[Issue 1388]: https://github.com/android/ndk/issues/1388
-[Issue 1391]: https://github.com/android/ndk/issues/1391
-[Issue 1427]: https://github.com/android/ndk/issues/1427
-
-## Changes
-
-* Updated LLVM to r399163b, based on LLVM 11 development.
-  * [Issue 829]: Fixed issue with `__attribute__((visibility("hidden")))`
-    symbols sometimes not being hidden.
-  * [Issue 1149]: Fixed Clang crash with `#pragma detect_mismatch`.
-  * [Issue 1212]: Fixed llvm-strip to match GNU behavior for removing file
-    symbols.
-  * [Issue 1248]: Fixed LLD Neon crash.
-  * [Issue 1303]: Fixed Neon intrinsic optimizer crash.
-
-* Updated make to 4.3.
-
-* Updated libc++, libc++abi, and libunwind to
-  https://github.com/llvm/llvm-project/commit/52ec983895436089c5be0b0c4d967423db16045b.
-
-* [Issue 609]: `std::filesystem` support is now included. There are two known
-  issues:
-   * [Issue 1258]: `std::filesystem::perm_options::nofollow` may not be
-     honored on old devices.
-   * [Issue 1260]: `std::filesystem::canonical` will incorrectly succeed when
-     passed a non-existent path on old devices.
-
-* [Issue 843]: `llvm-strip` is now used instead of `strip` to avoid breaking
-   RelRO with LLD. Note that the Android Gradle Plugin performs its own
-   stripping, so most users will need to upgrade to Android Gradle Plugin
-   version 4.0 or newer to get the fix.
-
-* [Issue 1130]: Fixed undefined references to new that could occur when building
-  for APIs prior to 21 and the static libc++. Note that LLD appears to have been
-  unaffected, but the problem is still present for ndk-build when using the
-  deprecated linkers.
-
-* [Issue 1139]: `native_app_glue` now hooks up the `APP_CMD_WINDOW_RESIZED`,
-  `APP_CMD_WINDOW_REDRAW_NEEDED`, and `APP_CMD_CONTENT_RECT_CHANGED` messages.
-
-* [Issue 1196]: Backtraces for crashes on devices older than API 29 are now
-  correct when using LLD if using ndk-build or the CMake toolchain file. If
-  using a different system and targeting devices older than API 29, use
-  `-Wl,--no-rosegment` when linking. See the [Build System Maintainers Guide]
-  for more information.
-
-* The deprecated `<NDK>/platforms` and `<NDK>/sysroot` directories have been
-  removed. These directories were merged and relocated into the toolchain during
-  r19. The location of these contents should not be relevant to anyone,
-  including build systems, since the toolchain handles them implicitly. If you
-  are using a build system that hasn't adapted to the changes introduced in NDK
-  r19, file a bug with your build system maintainer. See the [Build System
-  Maintainers Guide] for information on using the NDK in your own build system.
-
-* `llvm-ar` is now used instead of `ar`.
-
-* [Issue 1200]: Fixed an issue with using `dlclose` with libraries using
-  `thread_local` with non-trivial destructors and the static libc++.
-
-* The legacy libc++ linker scripts in `<NDK>/sources/cxx-stl/llvm-libc++` have
-  been removed. The linkers scripts in the toolchain should be used instead as
-  described by the [Build System Maintainers Guide].
-
-* LLD is now used by default. If your build is not yet compatible with LLD, you
-  can continue using the deprecated linkers, set `APP_LD=deprecated` for
-  ndk-build, `ANDROID_LD=deprecated` for CMake, or use an explicit
-  `-fuse-ld=gold` or `-fuse-ld=bfd` in your custom build system. If you
-  encounter issues be sure to file a bug, because this will not be an option in
-  a subsequent release.
-
-  Note that [Issue 843] will affect builds using LLD with binutils strip and
-  objcopy as opposed to llvm-strip and llvm-objcopy.
-
-* ndk-gdb now uses lldb as the debugger. gdb is deprecated and will be removed in
-  a future release. To fall back to gdb, use --no-lldb option. But please
-  [file a bug] explaining why you couldn't use lldb.
-
-[Build System Maintainers Guide]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
-[Issue 609]: https://github.com/android/ndk/issues/609
-[Issue 829]: https://github.com/android/ndk/issues/829
-[Issue 1139]: https://github.com/android/ndk/issues/1139
-[Issue 1149]: https://github.com/android/ndk/issues/1149
-[Issue 1196]: https://github.com/android/ndk/issues/1196
-[Issue 1200]: https://github.com/android/ndk/issues/1200
-[Issue 1212]: https://github.com/android/ndk/issues/1212
-[Issue 1248]: https://github.com/android/ndk/issues/1248
-[Issue 1258]: https://github.com/android/ndk/issues/1258
-[Issue 1260]: https://github.com/android/ndk/issues/1260
-[Issue 1303]: https://github.com/android/ndk/issues/1303
-[file a bug]: https://github.com/android/ndk/issues/new/choose
-
-## Known Issues
-
-* This is not intended to be a comprehensive list of all outstanding bugs.
-* [Issue 360]: `thread_local` variables with non-trivial destructors will cause
-  segfaults if the containing library is `dlclose`ed on devices running M or
-  newer, or devices before M when using a static STL. The simple workaround is
-  to not call `dlclose`.
-* [Issue 906]: Clang does not pass `-march=armv7-a` to the assembler when using
-  `-fno-integrated-as`. This results in the assembler generating ARMv5
-  instructions. Note that by default Clang uses the integrated assembler which
-  does not have this problem. To workaround this issue, explicitly use
-  `-march=armv7-a` when building for 32-bit ARM with the non-integrated
-  assembler, or use the integrated assembler. ndk-build and CMake already
-  contain these workarounds.
-* [Issue 988]: Exception handling when using ASan via wrap.sh can crash. To
-  workaround this issue when using libc++_shared, ensure that your
-  application's libc++_shared.so is in `LD_PRELOAD` in your `wrap.sh` as in the
-  following example:
-
-  ```bash
-  #!/system/bin/sh
-  HERE="$(cd "$(dirname "$0")" && pwd)"
-  export ASAN_OPTIONS=log_to_syslog=false,allow_user_segv_handler=1
-  ASAN_LIB=$(ls $HERE/libclang_rt.asan-*-android.so)
-  if [ -f "$HERE/libc++_shared.so" ]; then
-      # Workaround for https://github.com/android/ndk/issues/988.
-      export LD_PRELOAD="$ASAN_LIB $HERE/libc++_shared.so"
-  else
-      export LD_PRELOAD="$ASAN_LIB"
-  fi
-  "$@"
-   ```
-
-  There is no known workaround for libc++_static.
-
-  Note that because this is a platform bug rather than an NDK bug this
-  workaround will be necessary for this use case to work on all devices until
-  at least Android R.
-* [Issue 1130]: When using `c++_static` and the deprecated linker with ndk-build
-  with an `APP_PLATFORM` below 21, undefined references to operator new may
-  occur. The fix is to use LLD.
-* This version of the NDK is incompatible with the Android Gradle plugin
-  version 3.0 or older. If you see an error like
-  `No toolchains found in the NDK toolchains folder for ABI with prefix: mips64el-linux-android`,
-  update your project file to [use plugin version 3.1 or newer]. You will also
-  need to upgrade to Android Studio 3.1 or newer.
-* [Issue 843]: Using LLD with binutils `strip` or `objcopy` breaks RelRO. Use
-   `llvm-strip` and `llvm-objcopy` instead. This issue has been resolved in
-   Android Gradle Plugin version 4.0 (for non-Gradle users, the fix is also in
-   ndk-build and our CMake toolchain file), but may affect other build systems.
-
-[Issue 360]: https://github.com/android/ndk/issues/360
-[Issue 843]: https://github.com/android/ndk/issues/843
-[Issue 906]: https://github.com/android/ndk/issues/906
-[Issue 988]: https://github.com/android/ndk/issues/988
-[Issue 1130]: https://github.com/android/ndk/issues/1130
-[use plugin version 3.1 or newer]: https://developer.android.com/studio/releases/gradle-plugin#updating-plugin
diff --git a/docs/changelogs/Changelog-r23.md b/docs/changelogs/Changelog-r23.md
deleted file mode 100644
index 8d4d9b1..0000000
--- a/docs/changelogs/Changelog-r23.md
+++ /dev/null
@@ -1,221 +0,0 @@
-# Changelog
-
-Report issues to [GitHub].
-
-For Android Studio issues, follow the docs on the [Android Studio site].
-
-If you're a build system maintainer that needs to use the tools in the NDK
-directly, see the [build system maintainers guide].
-
-[GitHub]: https://github.com/android/ndk/issues
-[Android Studio site]: http://tools.android.com/filing-bugs
-[build system maintainers]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
-
-## Announcements
-
-* GNU binutils, excluding the GNU Assembler (GAS), has been removed. GAS will be
-  removed in the next release. If you are building with `-fno-integrated-as`,
-  file bugs if anything is preventing you from removing that flag. See
-  [Clang Migration Notes] for advice about making assembly compatible with LLVM.
-
-* Support for GDB has ended. GDB will be removed in the next release. Use LLDB
-  instead. Note that `ndk-gdb` uses LLDB by default.
-
-* NDK r23 is the last release that will support non-Neon. Beginning with NDK
-  r24, the armeabi-v7a libraries in the sysroot will be built with Neon. A very
-  small number of very old devices do not support Neon so most apps will not
-  notice aside from the performance improvement.
-
-* Jelly Bean (APIs 16, 17, and 18) will not be supported in the next NDK
-  release. The minimum OS supported by the NDK for r24 will be KitKat (API level
-  19).
-
-[Clang Migration Notes]: https://android.googlesource.com/platform/ndk/+/master/docs/ClangMigration.md
-
-## r23c
-
-* Update LLVM to clang-r416183c2, based on LLVM 12 development.
-  * [Issue 1590]: Fixed LLDB crashes when calling help on unknown commands.
-  * [Issue 1608]: Fixed crash in vector conversions.
-  * [Issue 1619]: Fixed performance regression in arm64 vectorization.
-  * [Issue 1645]: Fixed crash caused by openmp master/critical pragmas.
-  * [Issue 1672]: Fixed armeabi-v7a libunwind.a to be compatible with vfpv3-d16
-    (remember that this is the last release that will support that FPU setting).
-* [Issue 1410]: Fix ndk-build for Apple M1.
-* [Issue 1546]: Universal binaries (M1 support) for make (affects ndk-build).
-* [Issue 1547]: ndk-build now uses Python 3 internally, improving M1 support.
-* [Issue 1577]: Universal binaries (M1 support) for shader-tools (vulkan
-  compilers).
-* [Issue 1569]: Fix `-fno-integrated-as` for Linux and Darwin hosts by making
-  GAS symlink relative
-* [Issue 1589]: Fix incorrect API level check for `-Wl,--no-rosegment` in
-  ndk-build and CMake.
-* [Issue 1593]: Improved ndk-which to fall back to LLVM tools when the GNU names
-  are used. For example, `ndk-which strip` will now return the path to
-  `llvm-strip` instead of nothing.
-* [Issue 1610]: Fix `ANDROID_NATIVE_API_LEVEL` CMake variable when using the
-  non-legacy toolchain file.
-* [Issue 1618]: Fix behavior of `ANDROID_CPP_FEATURES` with the new toolchain
-  file.
-* [Issue 1634]: Fixed the build rule for the libshaderc_combined target.
-* [Issue 1656]: The new CMake toolchain file now ignores `ANDROID_ARM_MODE` when
-  it is passed for ABIs other than armeabi-v7a like the legacy toolchain file
-  did. With CMake 3.22 it is an error to set `CMAKE_ANDROID_ARM_MODE` for other
-  ABIs, so this fixes a potential incompatibility between the legacy and new
-  toolchains when using CMake 3.22+.
-* [Issue 1693]: The NDK's toolchain file for CMake (`android.toolchain.cmake`)
-  defaults to the legacy toolchain file for all versions of CMake. The new
-  toolchain file can still be enabled using
-  `-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=OFF`.
-
-[Issue 1410]: https://github.com/android/ndk/issues/1410
-[Issue 1546]: https://github.com/android/ndk/issues/1546
-[Issue 1577]: https://github.com/android/ndk/issues/1577
-[Issue 1589]: https://github.com/android/ndk/issues/1589
-[Issue 1590]: https://github.com/android/ndk/issues/1590
-[Issue 1593]: https://github.com/android/ndk/issues/1593
-[Issue 1608]: https://github.com/android/ndk/issues/1608
-[Issue 1610]: https://github.com/android/ndk/issues/1610
-[Issue 1618]: https://github.com/android/ndk/issues/1618
-[Issue 1619]: https://github.com/android/ndk/issues/1619
-[Issue 1634]: https://github.com/android/ndk/issues/1634
-[Issue 1645]: https://github.com/android/ndk/issues/1645
-[Issue 1656]: https://github.com/android/ndk/issues/1656
-[Issue 1672]: https://github.com/android/ndk/issues/1672
-[Issue 1693]: https://github.com/android/ndk/issues/1693
-
-## r23b
-
-* Update LLVM to clang-r416183c1, based on LLVM 12 development.
-  * [Issue 1540]: Fixed compiler crash when using coroutines.
-  * [Issue 1544]: Now uses universal binaries for M1 Macs.
-  * [Issue 1551]: Prevent each translation unit from receiving its own copy of
-    emulated thread-local global variables.
-  * [Issue 1555]: Fixed compiler crash for armeabi-v7a.
-* [Issue 1492]: ndk-build.cmd: Stop using make's `-O` (`--output-sync`) flag on
-  Windows to avoid `fcntl(): Bad file descriptor` error.
-* [Issue 1553]: Updated sysroot to latest Android 12.
-* [Issue 1569]: Fixed `-fno-integrated-as` not being able to find the assembler.
-* CMake changes:
-  * [Issue 1536]: Make optimization flags used with CMake more consistent.
-    Historically thumb release builds used `-Oz`, but AGP switched to using
-    `RelWithDebInfo` for release builds in the latest release which was not
-    using `-Oz`. To reduce per-arch differences and behavior differences
-    compared to CMake's defaults, `-Oz` use was removed. You may see code size
-    increases for armeabi-v7a due to this, but also increased optimization. To
-    restore the prior behavior, add `-Oz` to your cflags.
-  * [Issue 1560]: Fixed pull-up of unsupported API levels when using the new
-    CMake toolchain file. This affects CMake 3.21 and
-    `ANDROID_USE_LEGACY_TOOLCHAIN_FILE=ON` use cases, and was the common case
-    for AGP users with a `minSdkVersion` below 21.
-  * [Issue 1573]: Fixed `ANDROID_USE_LEGACY_TOOLCHAIN_FILE` not being obeyed
-    during CMake try-compile.
-  * [Issue 1581]: Added workaround for [CMake Issue 22647], which was causing
-    `MINGW` to be incorrectly defined by CMake when building for Android on a
-    Windows host. This only affected those using the Android toolchain file when
-    CMake 3.21 or newer was used. This likely was not a regression for users not
-    using the Android toolchain. The change will fix both use cases.
-
-[CMake Issue 22647]: https://gitlab.kitware.com/cmake/cmake/-/issues/22647
-[Issue 1492]: https://github.com/android/ndk/issues/1492
-[Issue 1536]: https://github.com/android/ndk/issues/1536
-[Issue 1540]: https://github.com/android/ndk/issues/1540
-[Issue 1544]: https://github.com/android/ndk/issues/1544
-[Issue 1547]: https://github.com/android/ndk/issues/1547
-[Issue 1551]: https://github.com/android/ndk/issues/1551
-[Issue 1553]: https://github.com/android/ndk/issues/1553
-[Issue 1555]: https://github.com/android/ndk/issues/1555
-[Issue 1560]: https://github.com/android/ndk/issues/1560
-[Issue 1569]: https://github.com/android/ndk/issues/1569
-[Issue 1573]: https://github.com/android/ndk/issues/1573
-[Issue 1581]: https://github.com/android/ndk/issues/1581
-
-## Changes
-
-* Includes Android 12 APIs.
-* Updated LLVM to clang-r416183b, based on LLVM 12 development.
-  * [Issue 1047]: Fixes crash when using ASan with the CFI unwinder.
-  * [Issue 1096]: Includes support for [Polly]. Enable by adding `-mllvm -polly`
-    to your cflags.
-  * [Issue 1230]: LLVM's libunwind is now used instead of libgcc for all
-    architectures rather than just 32-bit Arm.
-  * [Issue 1231]: LLVM's libclang_rt.builtins is now used instead of libgcc.
-  * [Issue 1406]: Fixes crash with Neon intrinsic.
-* Vulkan validation layer source and binaries are no longer shipped in the NDK.
-  The latest are now posted directly to [GitHub](https://github.com/KhronosGroup/Vulkan-ValidationLayers/releases).
-* Vulkan tools source is also removed, specifically vulkan_wrapper.
-  It should be downloaded upstream from [GitHub](https://github.com/KhronosGroup/Vulkan-Tools).
-* The toolchain file (android.toolchain.cmake) is refactored to base on CMake's
-  integrated Android support. This new toolchain file will be enabled by default
-  for CMake 3.21 and newer. No user side change is expected. But if anything goes
-  wrong, please file a bug and set `ANDROID_USE_LEGACY_TOOLCHAIN_FILE=ON` to
-  restore the legacy behavior.
-    * When using the new behavior (when using CMake 3.21+ and not explicitly
-      selecting the legacy toolchain), **default build flags may change**. One
-      of the primary goals was to reduce the behavior differences between our
-      toolchain and CMake, and CMake's default flags do not always match the
-      legacy toolchain file. Most notably, if using `CMAKE_BUILD_TYPE=Release`,
-      your optimization type will likely be `-O3` instead of `-O2` or `-Oz`. See
-      [Issue 1536] for more information.
-* [Issue 929]: `find_library` now prefers shared libraries from the sysroot over
-  static libraries.
-* [Issue 1390]: ndk-build now warns when building a static executable with the
-  wrong API level.
-* [Issue 1452]: `NDK_ANALYZE=1` now sets `APP_CLANG_TIDY=true` rather than using
-  scan-build. clang-tidy performs all the same checks by default, and scan-build
-  was no longer working. See the bug for more details, but no user-side changes
-  should be needed.
-
-[Issue 929]: https://github.com/android/ndk/issues/929
-[Issue 1047]: https://github.com/android/ndk/issues/1047
-[Issue 1096]: https://github.com/android/ndk/issues/1096
-[Issue 1230]: https://github.com/android/ndk/issues/1230
-[Issue 1231]: https://github.com/android/ndk/issues/1231
-[Issue 1390]: https://github.com/android/ndk/issues/1390
-[Issue 1406]: https://github.com/android/ndk/issues/1406
-[Issue 1452]: https://github.com/android/ndk/issues/1452
-[Issue 1536]: https://github.com/android/ndk/issues/1536
-[Polly]: https://polly.llvm.org/
-
-## Known Issues
-
-* This is not intended to be a comprehensive list of all outstanding bugs.
-* [Issue 360]: `thread_local` variables with non-trivial destructors will cause
-  segfaults if the containing library is `dlclose`ed on devices running M or
-  newer, or devices before M when using a static STL. The simple workaround is
-  to not call `dlclose`.
-* [Issue 906]: Clang does not pass `-march=armv7-a` to the assembler when using
-  `-fno-integrated-as`. This results in the assembler generating ARMv5
-  instructions. Note that by default Clang uses the integrated assembler which
-  does not have this problem. To workaround this issue, explicitly use
-  `-march=armv7-a` when building for 32-bit ARM with the non-integrated
-  assembler, or use the integrated assembler. ndk-build and CMake already
-  contain these workarounds.
-* [Issue 988]: Exception handling when using ASan via wrap.sh can crash. To
-  workaround this issue when using libc++_shared, ensure that your
-  application's libc++_shared.so is in `LD_PRELOAD` in your `wrap.sh` as in the
-  following example:
-
-  ```bash
-  #!/system/bin/sh
-  HERE="$(cd "$(dirname "$0")" && pwd)"
-  export ASAN_OPTIONS=log_to_syslog=false,allow_user_segv_handler=1
-  ASAN_LIB=$(ls $HERE/libclang_rt.asan-*-android.so)
-  if [ -f "$HERE/libc++_shared.so" ]; then
-      # Workaround for https://github.com/android/ndk/issues/988.
-      export LD_PRELOAD="$ASAN_LIB $HERE/libc++_shared.so"
-  else
-      export LD_PRELOAD="$ASAN_LIB"
-  fi
-  "$@"
-   ```
-
-  There is no known workaround for libc++_static.
-
-  Note that because this is a platform bug rather than an NDK bug this
-  workaround will be necessary for this use case to work on all devices until
-  at least Android R.
-
-[Issue 360]: https://github.com/android/ndk/issues/360
-[Issue 906]: https://github.com/android/ndk/issues/906
-[Issue 988]: https://github.com/android/ndk/issues/988
diff --git a/docs/changelogs/Changelog-r24.md b/docs/changelogs/Changelog-r24.md
deleted file mode 100644
index 773a739..0000000
--- a/docs/changelogs/Changelog-r24.md
+++ /dev/null
@@ -1,161 +0,0 @@
-# Changelog
-
-Report issues to [GitHub].
-
-For Android Studio issues, follow the docs on the [Android Studio site].
-
-If you're a build system maintainer that needs to use the tools in the NDK
-directly, see the [build system maintainers guide].
-
-[GitHub]: https://github.com/android/ndk/issues
-[Android Studio site]: http://tools.android.com/filing-bugs
-[build system maintainers]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
-
-## Announcements
-
-* The GNU Assembler (GAS), has been removed. If you were building with
-  `-fno-integrated-as` you'll need to remove that flag. See
-  [Clang Migration Notes] for advice on making assembly compatible with LLVM.
-
-* GDB has been removed. Use LLDB instead. Note that `ndk-gdb` uses LLDB by
-  default, and Android Studio has only ever supported LLDB.
-
-* Jelly Bean (APIs 16, 17, and 18) is no longer supported. The minimum OS
-  supported by the NDK is KitKat (API level 19).
-
-* Non-Neon devices are no longer supported. A very small number of very old
-  devices do not support Neon so most apps will not notice aside from the
-  performance improvement.
-
-* RenderScript build support has been removed. RenderScript was
-  [deprecated](https://developer.android.com/about/versions/12/deprecations#renderscript)
-  in Android 12. If you have not finished migrating your apps away from
-  RenderScript, NDK r23 LTS can be used.
-
-[Clang Migration Notes]: https://android.googlesource.com/platform/ndk/+/master/docs/ClangMigration.md
-
-## r24b
-
-* [Issue 1693]: The NDK's toolchain file for CMake (`android.toolchain.cmake`)
-  defaults to the legacy toolchain file for all versions of CMake. The new
-  toolchain file can still be enabled using
-  `-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=OFF`.
-
-[Issue 1693]: https://github.com/android/ndk/issues/1693
-
-## Changes
-
-* Includes Android 12L APIs.
-* Updated LLVM to clang-r437112b, based on LLVM 14 development.
-  * [Issue 1590]: Fix LLDB help crash.
-* [Issue 1108]: Removed `mbstowcs` and `wcstombs` from the pre-API 21 stubs and
-  moved the implementation to `libandroid_support` to fix those APIs on old
-  devices.
-* [Issue 1299]: Additional Apple M1 support:
-  * [Issue 1410]: Fixed incorrect host tool directory identification in
-    ndk-build on M1 macs.
-  * [Issue 1544]: LLVM tools are now universal binaries.
-  * [Issue 1546]: Make is now a universal binary.
-* [Issue 1479]: Added `LOCAL_BRANCH_PROTECTION` option to ndk-build for using
-  `-mbranch-protection` with aarch64 without breaking other ABIs. Example use:
-  `LOCAL_BRANCH_PROTECTION := standard`.
-* [Issue 1492]: Windows Make now works with `-O`, and ndk-build now uses it by
-  default.
-* [Issue 1559]: Added `LOCAL_ALLOW_MISSING_PREBUILT` option to
-  `PREBUILT_SHARED_LIBRARY` and `PREBUILT_STATIC_LIBRARY` which defers failures
-  for missing prebuilts to build time. This enables use cases within AGP where
-  one module provides "pre" built libraries to another module.
-* [Issue 1587]: ndk-stack is now tolerant of unsorted zip infos.
-* [Issue 1589]: Fixed broken stack traces on API 29 devices when using a
-  minSdkVersion of 29.
-* [Issue 1593]: Improved ndk-which to fall back to LLVM tools when the GNU names
-  are used. For example, `ndk-which strip` will now return the path to
-  `llvm-strip` instead of nothing.
-* [Issue 1610]: Fixed handling of `ANDROID_NATIVE_API_LEVEL` in the new CMake
-  toolchain file.
-* [Issue 1618]: Corrected `CMAKE_ANDROID_EXCEPTIONS` behavior for the new CMake
-  toolchain file.
-* [Issue 1623]: Fixed behavior of the legacy CMake toolchain file when used with
-  new versions of CMake (incompatible `-gcc-toolchain` argument).
-* [Issue 1656]: The new CMake toolchain file now ignores `ANDROID_ARM_MODE` when
-  it is passed for ABIs other than armeabi-v7a like the legacy toolchain file
-  did. With CMake 3.22 it is an error to set `CMAKE_ANDROID_ARM_MODE` for other
-  ABIs, so this fixes a potential incompatibility between the legacy and new
-  toolchains when using CMake 3.22+.
-* Removed `make-standalone-toolchain.sh`. This was broken in a previous release
-  and it was unnoticed, so it seems unused. `make_standalone_toolchain.py`
-  remains, but neither has been needed since NDK r19 since the toolchain can be
-  invoked directly.
-
-[Issue 1108]: https://github.com/android/ndk/issues/1108
-[Issue 1299]: https://github.com/android/ndk/issues/1299
-[Issue 1410]: https://github.com/android/ndk/issues/1410
-[Issue 1479]: https://github.com/android/ndk/issues/1479
-[Issue 1492]: https://github.com/android/ndk/issues/1492
-[Issue 1544]: https://github.com/android/ndk/issues/1544
-[Issue 1546]: https://github.com/android/ndk/issues/1546
-[Issue 1559]: https://github.com/android/ndk/issues/1559
-[Issue 1587]: https://github.com/android/ndk/issues/1587
-[Issue 1589]: https://github.com/android/ndk/issues/1589
-[Issue 1590]: https://github.com/android/ndk/issues/1590
-[Issue 1593]: https://github.com/android/ndk/issues/1593
-[Issue 1610]: https://github.com/android/ndk/issues/1610
-[Issue 1618]: https://github.com/android/ndk/issues/1618
-[Issue 1623]: https://github.com/android/ndk/issues/1623
-[Issue 1656]: https://github.com/android/ndk/issues/1656
-
-## Known Issues
-
-This is not intended to be a comprehensive list of all outstanding bugs.
-
-* [Issue 360]: `thread_local` variables with non-trivial destructors will cause
-  segfaults if the containing library is `dlclose`ed. This was fixed in API 28,
-  but code running on devices older than API 28 will need a workaround. The
-  simplest fix is to **stop calling `dlclose`**. If you absolutely must continue
-  calling `dlclose`, see the following table:
-
-  |                   | Pre-API 23           |  APIs 23-27   | API 28+ |
-  | ----------------- | -------------------- | ------------- | ------- |
-  | No workarounds    | Works for static STL | Broken        | Works   |
-  | `-Wl,-z,nodelete` | Works for static STL | Works         | Works   |
-  | No `dlclose`      | Works                | Works         | Works   |
-
-  If your code must run on devices older than M (API 23) and you cannot use the
-  static STL (common), **the only fix is to not call `dlclose`**, or to stop
-  using `thread_local` variables with non-trivial destructors.
-
-  If your code does not need to run on devices older than API 23 you can link
-  with `-Wl,-z,nodelete`, which instructs the linker to ignore `dlclose` for
-  that library. You can backport this behavior by not calling `dlclose`.
-
-  The fix in API 28 is the standardized inhibition of `dlclose`, so you can
-  backport the fix to older versions by not calling `dlclose`.
-
-* [Issue 988]: Exception handling when using ASan via wrap.sh can crash. To
-  workaround this issue when using libc++_shared, ensure that your application's
-  libc++_shared.so is in `LD_PRELOAD` in your `wrap.sh` as in the following
-  example:
-
-  ```bash
-  #!/system/bin/sh
-  HERE="$(cd "$(dirname "$0")" && pwd)"
-  export ASAN_OPTIONS=log_to_syslog=false,allow_user_segv_handler=1
-  ASAN_LIB=$(ls $HERE/libclang_rt.asan-*-android.so)
-  if [ -f "$HERE/libc++_shared.so" ]; then
-      # Workaround for https://github.com/android/ndk/issues/988.
-      export LD_PRELOAD="$ASAN_LIB $HERE/libc++_shared.so"
-  else
-      export LD_PRELOAD="$ASAN_LIB"
-  fi
-  "$@"
-   ```
-
-  There is no known workaround for libc++_static.
-
-  Note that because this is a platform bug rather than an NDK bug this cannot be
-  fixed with an NDK update. This workaround will be necessary for code running
-  on devices that do not contain the fix, and the bug has not been fixed even in
-  the latest release of Android.
-
-[Issue 360]: https://github.com/android/ndk/issues/360
-[Issue 988]: https://github.com/android/ndk/issues/988
diff --git a/docs/changelogs/Changelog-r25.md b/docs/changelogs/Changelog-r25.md
deleted file mode 100644
index 474a331..0000000
--- a/docs/changelogs/Changelog-r25.md
+++ /dev/null
@@ -1,143 +0,0 @@
-# Changelog
-
-Report issues to [GitHub].
-
-For Android Studio issues, follow the docs on the [Android Studio site].
-
-If you're a build system maintainer that needs to use the tools in the NDK
-directly, see the [build system maintainers guide].
-
-[GitHub]: https://github.com/android/ndk/issues
-[Android Studio site]: http://tools.android.com/filing-bugs
-[build system maintainers guide]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
-
-
-## Announcements
-
-* Support for KitKat (APIs 19 and 20) is planned to be removed in the next NDK
-  release. The minimum OS supported by the NDK for r26 will be Lollipop (API
-  level 21). See [Issue 1751] for details.
-
-[Issue 1751]: https://github.com/android/ndk/issues/1751
-
-## r25c
-
-* Updated LLVM to clang-r450784d1, based on LLVM 14 development.
-  * [Issue 1797]: Fixed LLDB handling of forward slashes in absolute paths on
-    Windows.
-  * [Issue 1832]: Improvements to aarch64 vector code generation.
-* [Issue 1813]: `-Wl,--gc-sections` is no longer set by default for debug
-  builds. This behavior was removed because it could cause the linker to remove
-  functions that may be useful to evaluate during debugging. The new CMake
-  toolchain file (`-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=OFF`, not the default
-  behavior) does not include this fix because it requires a CMake fix first.
-* [Issue 1757]: Updated simpleperf. Includes fix for signing denial when run on
-  macOS.
-
-[Issue 1797]: https://github.com/android/ndk/issues/1797
-[Issue 1813]: https://github.com/android/ndk/issues/1813
-[Issue 1832]: https://github.com/android/ndk/issues/1832
-
-## r25b
-
-* [Issue 1739]: Fixed C compatibility issue in `amidi/AMidi.h`.
-* [Issue 1740]: Fixed the legacy toolchain when using CMake's `Release` build
-  configuration. Since r23b it has not be receiving any optimization flag. It
-  will now receive `-O3`. If you're building with AGP and haven't overridden
-  AGP's default CMake modes, this change does not affect you, as AGP uses
-  `RelWithDebInfo` by default.
-* [Issue 1744]: Fixes ASan wrap.sh file to support 32-bit apps on 64-bit
-  devices.
-
-[Issue 1739]: https://github.com/android/ndk/issues/1739
-[Issue 1740]: https://github.com/android/ndk/issues/1740
-[Issue 1744]: https://github.com/android/ndk/issues/1744
-
-## Changes
-
-* Includes Android 13 APIs.
-* Updated LLVM to clang-r450784d, based on LLVM 14 development.
-  * [Issue 1455]: Improved display of Android API levels in Clang diagnostics.
-  * [Issue 1608]: Fixed crash in vector conversions.
-  * [Issue 1710]: Fixed compiler crash caused by invalid `-march` values.
-* Eliminate duplicate static libraries in API-versioned sysroot directories.
-  This reduces the uncompressed size of the NDK by 500 MB.
-* Strip some binaries and libraries. This reduces the uncompressed size of
-  the NDK by 300 MB.
-* Remove python2. All scripts now use python3.
-* [Issue 933]: Updated reference ASan wrap.sh to support attaching the Java
-  debugger.
-* [Issue 1334]: Improved argument escaping for compile_commands.json files
-  generated by ndk-build.
-* [Issue 1634]: Fixed the build rule for the libshaderc_combined target.
-* [Issue 1693]: The NDK's toolchain file for CMake (`android.toolchain.cmake`)
-  defaults to the legacy toolchain file for all versions of CMake. The new
-  toolchain file can still be enabled using
-  `-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=OFF`.
-* [Issue 1717]: Arranged for --gc-sections to be passed to the linker when
-  building shared libraries.
-
-[Issue 933]: https://github.com/android/ndk/issues/933
-[Issue 1334]: https://github.com/android/ndk/issues/1334
-[Issue 1455]: https://github.com/android/ndk/issues/1455
-[Issue 1608]: https://github.com/android/ndk/issues/1608
-[Issue 1634]: https://github.com/android/ndk/issues/1634
-[Issue 1693]: https://github.com/android/ndk/issues/1693
-[Issue 1710]: https://github.com/android/ndk/issues/1710
-[Issue 1717]: https://github.com/android/ndk/issues/1717
-
-## Known Issues
-
-This is not intended to be a comprehensive list of all outstanding bugs.
-
-* [Issue 360]: `thread_local` variables with non-trivial destructors will cause
-  segfaults if the containing library is `dlclose`ed. This was fixed in API 28,
-  but code running on devices older than API 28 will need a workaround. The
-  simplest fix is to **stop calling `dlclose`**. If you absolutely must continue
-  calling `dlclose`, see the following table:
-
-  |                   | Pre-API 23           |  APIs 23-27   | API 28+ |
-  | ----------------- | -------------------- | ------------- | ------- |
-  | No workarounds    | Works for static STL | Broken        | Works   |
-  | `-Wl,-z,nodelete` | Works for static STL | Works         | Works   |
-  | No `dlclose`      | Works                | Works         | Works   |
-
-  If your code must run on devices older than M (API 23) and you cannot use the
-  static STL (common), **the only fix is to not call `dlclose`**, or to stop
-  using `thread_local` variables with non-trivial destructors.
-
-  If your code does not need to run on devices older than API 23 you can link
-  with `-Wl,-z,nodelete`, which instructs the linker to ignore `dlclose` for
-  that library. You can backport this behavior by not calling `dlclose`.
-
-  The fix in API 28 is the standardized inhibition of `dlclose`, so you can
-  backport the fix to older versions by not calling `dlclose`.
-
-* [Issue 988]: Exception handling when using ASan via wrap.sh can crash. To
-  workaround this issue when using libc++_shared, ensure that your application's
-  libc++_shared.so is in `LD_PRELOAD` in your `wrap.sh` as in the following
-  example:
-
-  ```bash
-  #!/system/bin/sh
-  HERE="$(cd "$(dirname "$0")" && pwd)"
-  export ASAN_OPTIONS=log_to_syslog=false,allow_user_segv_handler=1
-  ASAN_LIB=$(ls $HERE/libclang_rt.asan-*-android.so)
-  if [ -f "$HERE/libc++_shared.so" ]; then
-      # Workaround for https://github.com/android/ndk/issues/988.
-      export LD_PRELOAD="$ASAN_LIB $HERE/libc++_shared.so"
-  else
-      export LD_PRELOAD="$ASAN_LIB"
-  fi
-  "$@"
-   ```
-
-  There is no known workaround for libc++_static.
-
-  Note that because this is a platform bug rather than an NDK bug this cannot be
-  fixed with an NDK update. This workaround will be necessary for code running
-  on devices that do not contain the fix, and the bug has not been fixed even in
-  the latest release of Android.
-
-[Issue 360]: https://github.com/android/ndk/issues/360
-[Issue 988]: https://github.com/android/ndk/issues/988
diff --git a/docs/changelogs/Changelog-r26.md b/docs/changelogs/Changelog-r26.md
deleted file mode 100644
index 2ef4d7a..0000000
--- a/docs/changelogs/Changelog-r26.md
+++ /dev/null
@@ -1,187 +0,0 @@
-# Changelog
-
-Report issues to [GitHub].
-
-For Android Studio issues, follow the docs on the [Android Studio site].
-
-If you're a build system maintainer that needs to use the tools in the NDK
-directly, see the [build system maintainers guide].
-
-[GitHub]: https://github.com/android/ndk/issues
-[Android Studio site]: http://tools.android.com/filing-bugs
-[build system maintainers guide]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
-
-## Announcements
-
-* KitKat (APIs 19 and 20) is no longer supported. The minimum OS supported by
-  the NDK is Lollipop (API level 21). See [Issue 1751] for details.
-* libc++ has been updated. The NDK's libc++ now comes directly from our LLVM
-  toolchain, so every future LLVM update is also a libc++ update. Future
-  changelogs will not explicitly mention libc++ updates.
-
-[Issue 1751]: https://github.com/android/ndk/issues/1751
-
-## r26d
-
-* [Issue 1994]: Fixed ndk-gdb/ndk-lldb to use the correct path for
-  make and other tools.
-
-[Issue 1994]: https://github.com/android/ndk/issues/1994
-
-## r26c
-
-* Updated LLVM to clang-r487747e. See `AndroidVersion.txt` and
-  `clang_source_info.md` in the toolchain directory for version information.
-  * [Issue 1928]: Fixed Clang crash in instruction selection for 32-bit armv8
-    floating point.
-  * [Issue 1953]: armeabi-v7a libc++ libraries are once again built as thumb.
-
-[Issue 1928]: https://github.com/android/ndk/issues/1928
-[Issue 1953]: https://github.com/android/ndk/issues/1953
-
-## r26b
-
-* Updated LLVM to clang-r487747d. See `AndroidVersion.txt` and
-  `clang_source_info.md` in the toolchain directory for version information.
-  * This update was intended to be included in r26 RC 1. The original release
-    noted these fixes in the changelog, but the new toolchain had not actually
-    been included.
-  * [Issue 1907]: HWASan linker will be used automatically for
-    `minSdkVersion 34` or higher.
-  * [Issue 1909]: Fixed ABI mismatch between function-multi-versioning and ifunc
-    resolvers.
-* [Issue 1938]: Fixed ndk-stack to use the correct path for llvm-symbolizer and
-  other tools.
-
-[Issue 1907]: https://github.com/android/ndk/issues/1907
-[Issue 1909]: https://github.com/android/ndk/issues/1909
-[Issue 1938]: https://github.com/android/ndk/issues/1938
-
-## Changes
-
-* Updated LLVM to clang-r487747c. See `AndroidVersion.txt` and
-  `clang_source_info.md` in the toolchain directory for version information.
-  * Clang now treats `-Wimplicit-function-declaration` as an error rather than a
-    warning in C11 and newer. Clang's default C standard is 17, so this is a
-    change in default behavior compared to older versions of Clang, but is the
-    behavior defined by C99.
-
-    If you encounter these errors when upgrading, you most likely forgot an
-    `#include`. If you cannot (or do not want to) fix those issues, you can
-    revert to the prior behavior with
-    `-Wno-error=implicit-function-declaration`.
-
-    C++ users are unaffected. This has never been allowed in C++.
-
-    See https://reviews.llvm.org/D122983 for more details.
-  * [Issue 1298]: Fixed seccomp error with ASan on x86_64 devices.
-  * [Issue 1530]: Updated libc++ to match LLVM version.
-  * [Issue 1565]: Fixed lldb ncurses issue with terminal database on Darwin.
-  * [Issue 1677]: Fixed Clang crash in optimizer.
-  * [Issue 1679]: Clang will now automatically enable ELF TLS for
-    `minSdkVersion 29` or higher.
-  * [Issue 1834]: Fixed Clang crash during SVE conversions.
-  * [Issue 1860]: Fixed miscompilation affecting armv7.
-  * [Issue 1861]: Fixed front end crash in Clang.
-  * [Issue 1862]: Fixed Clang crash for aarch64 with `-Os`.
-  * [Issue 1880]: Fixed crash in clang-format.
-  * [Issue 1883]: Fixed crash when incorrectly using neon intrinsics.
-* Version scripts that name public symbols that are not present in the library
-  will now emit an error by default for ndk-build and the CMake toolchain file.
-  Build failures caused by this error are likely a bug in your library or a
-  mistake in the version script. To revert to the earlier behavior, pass
-  `-DANDROID_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS=ON` to CMake or set
-  `LOCAL_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS := true` in your `Android.mk`
-  file. For other build systems, see the secion titled "Version script
-  validation" in the [build system maintainers guide].
-* [Issue 873]: Weak symbols for API additions is supported. Provide
-  `__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__` as an option.
-* [Issue 1400]: NDK paths with spaces will now be diagnosed by ndk-build on
-  Windows. This has never been supported for any OS, but the error message
-  wasn't previously working on Windows either.
-* [Issue 1764]: Fixed Python 3 incompatibility when using `ndk-gdb` with `-f`.
-* [Issue 1803]: Removed useless `strtoq` and `strtouq` from the libc stub
-  libraries. These were never exposed in the header files, but could confuse
-  some autoconf like systems.
-* [Issue 1852]: Fixed ODR issue in linux/time.h.
-* [Issue 1878]: Fixed incorrect definition of `WIFSTOPPED`.
-* ndk-build now uses clang rather than clang++ when linking modules that do not
-  have C++ sources. There should not be any observable behavior differences
-  because ndk-build previously handled the C/C++ linking differences itself.
-* ndk-build now delegates C++ stdlib linking to the Clang driver. It is unlikely
-  that this will cause any observable behavior change, but any new behavior will
-  more closely match CMake and other build systems.
-
-[Issue 837]: https://github.com/android/ndk/issues/837
-[Issue 1298]: https://github.com/android/ndk/issues/1298
-[Issue 1400]: https://github.com/android/ndk/issues/1400
-[Issue 1530]: https://github.com/android/ndk/issues/1530
-[Issue 1565]: https://github.com/android/ndk/issues/1565
-[Issue 1677]: https://github.com/android/ndk/issues/1677
-[Issue 1679]: https://github.com/android/ndk/issues/1679
-[Issue 1764]: https://github.com/android/ndk/issues/1764
-[Issue 1803]: https://github.com/android/ndk/issues/1803
-[Issue 1834]: https://github.com/android/ndk/issues/1834
-[Issue 1852]: https://github.com/android/ndk/issues/1852
-[Issue 1860]: https://github.com/android/ndk/issues/1860
-[Issue 1861]: https://github.com/android/ndk/issues/1861
-[Issue 1862]: https://github.com/android/ndk/issues/1862
-[Issue 1878]: https://github.com/android/ndk/issues/1878
-[Issue 1880]: https://github.com/android/ndk/issues/1880
-[Issue 1883]: https://github.com/android/ndk/issues/1883
-
-## Known Issues
-
-This is not intended to be a comprehensive list of all outstanding bugs.
-
-* [Issue 360]: `thread_local` variables with non-trivial destructors will cause
-  segfaults if the containing library is `dlclose`ed. This was fixed in API 28,
-  but code running on devices older than API 28 will need a workaround. The
-  simplest fix is to **stop calling `dlclose`**. If you absolutely must continue
-  calling `dlclose`, see the following table:
-
-  |                   | Pre-API 23           |  APIs 23-27   | API 28+ |
-  | ----------------- | -------------------- | ------------- | ------- |
-  | No workarounds    | Works for static STL | Broken        | Works   |
-  | `-Wl,-z,nodelete` | Works for static STL | Works         | Works   |
-  | No `dlclose`      | Works                | Works         | Works   |
-
-  If your code must run on devices older than M (API 23) and you cannot use the
-  static STL (common), **the only fix is to not call `dlclose`**, or to stop
-  using `thread_local` variables with non-trivial destructors.
-
-  If your code does not need to run on devices older than API 23 you can link
-  with `-Wl,-z,nodelete`, which instructs the linker to ignore `dlclose` for
-  that library. You can backport this behavior by not calling `dlclose`.
-
-  The fix in API 28 is the standardized inhibition of `dlclose`, so you can
-  backport the fix to older versions by not calling `dlclose`.
-
-* [Issue 988]: Exception handling when using ASan via wrap.sh can crash. To
-  workaround this issue when using libc++_shared, ensure that your application's
-  libc++_shared.so is in `LD_PRELOAD` in your `wrap.sh` as in the following
-  example:
-
-  ```bash
-  #!/system/bin/sh
-  HERE="$(cd "$(dirname "$0")" && pwd)"
-  export ASAN_OPTIONS=log_to_syslog=false,allow_user_segv_handler=1
-  ASAN_LIB=$(ls $HERE/libclang_rt.asan-*-android.so)
-  if [ -f "$HERE/libc++_shared.so" ]; then
-      # Workaround for https://github.com/android/ndk/issues/988.
-      export LD_PRELOAD="$ASAN_LIB $HERE/libc++_shared.so"
-  else
-      export LD_PRELOAD="$ASAN_LIB"
-  fi
-  "$@"
-   ```
-
-  There is no known workaround for libc++_static.
-
-  Note that because this is a platform bug rather than an NDK bug this cannot be
-  fixed with an NDK update. This workaround will be necessary for code running
-  on devices that do not contain the fix, and the bug has not been fixed even in
-  the latest release of Android.
-
-[Issue 360]: https://github.com/android/ndk/issues/360
-[Issue 988]: https://github.com/android/ndk/issues/988
diff --git a/docs/changelogs/Changelog-r27.md b/docs/changelogs/Changelog-r27.md
deleted file mode 100644
index d7af2fa..0000000
--- a/docs/changelogs/Changelog-r27.md
+++ /dev/null
@@ -1,81 +0,0 @@
-# Changelog
-
-Report issues to [GitHub].
-
-For Android Studio issues, go to https://b.android.com and file a bug using the
-Android Studio component, not the NDK component.
-
-If you're a build system maintainer that needs to use the tools in the NDK
-directly, see the [build system maintainers guide].
-
-[GitHub]: https://github.com/android/ndk/issues
-[build system maintainers guide]: https://android.googlesource.com/platform/ndk/+/master/docs/BuildSystemMaintainers.md
-
-## Announcements
-
-* Android V will allow OEMs to ship arm64-v8a and x86_64 devices with 16KiB page
-  sizes. Devices that use this configuration will not be able to run existing
-  apps that use native code. To be compatible with these devices, applications
-  will need to rebuild all their native code to be 16KiB aligned, and rewrite
-  any code which assumes a specific page size. ndk-build and CMake have options
-  to enable this mode (see note about `APP_SUPPORT_FLEXIBLE_PAGE_SIZES` and
-  `ANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES` below). A future version of the NDK will
-  enable this mode by default. If you're using or maintaining a third-party
-  build system, consult the [build system maintainers guide] for instructions.
-
-  See [Support 16 KB page sizes] for more information.
-
-[Support 16 KB page sizes]: https://developer.android.com/guide/practices/page-sizes
-
-## Changes
-
-* Updated LLVM to clang-r522817. See `clang_source_info.md` in the toolchain
-  directory for version information.
-  * [Issue 1728]: Clang now emits an error for invalid Android target versions.
-  * [Issue 1853]: `clang-scan-deps` is now included.
-  * [Issue 1947]: Fixed various function multi-versioning crashes.
-  * [Issue 1963]: Fixed undefined behavior in `std::unexpected::has_value()`.
-  * [Issue 1988]: Added aarch64 support for `preserve_all` calling convention.
-* A RISC-V sysroot (AKA riscv64, or rv64) has been added. It is **not**
-  supported. It is present to aid bringup for OS vendors, but it's not yet a
-  supported Android ABI. It will not be built by default.
-* [Issue 1856]: Target-prefixed cmd wrappers for clang should now behave
-  appropriately when the first argument includes quotes. **You probably do not
-  need to use those wrappers.** In most cases where you would use
-  `aarch64-linux-android21-clang`, you can instead use `clang -target
-  aarch64-linux-android21`, e.g. `CC="clang -target aarch64-linux-android21"
-  ./configure`. The wrappers are only needed when working with systems that do
-  not properly handle a `CC` that includes arguments.
-* [Issue 1898]: ndk-stack now tolerates 0x prefixed addresses.
-* [Issue 1921]: `ANDROID_USE_LEGACY_TOOLCHAIN_FILE` value is now preserved
-  during try-compile steps when `ON`.
-* [Issue 1974]: Unintentionally shipped Vulkan headers have been removed from
-  `sources/third_party/vulkan`. The standard Vulkan headers are included in the
-  Android sysroot, which Clang will find automatically.
-* [Issue 1993]: ndk-stack now tolerates invalid UTF-8 characters in the trace.
-* [Issue 1994]: Fixed ndk-gdb/ndk-lldb to use the correct path for
-  make and other tools.
-* Added `APP_SUPPORT_FLEXIBLE_PAGE_SIZES` for ndk-build and
-  `ANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES` for CMake. Set to
-  `APP_SUPPORT_FLEXIBLE_PAGE_SIZES := true` in your `Application.mk` or pass
-  `-DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON` to CMake (via
-  `android.defaultConfig.externalNativeBuild.cmake.arguments` if you're using
-  the Android Gradle Plugin) to build your code to be compatible with devices
-  that use a 16KiB page size. Third-party build system users and maintainers
-  should consult the [build system maintainers guide].
-* Symlinks are now properly preserved in the macOS App Bundle. The NDK installed
-  via that method is now the same size as the one installed via the SDK manager.
-* The unsupported libclang, libclang-cpp, libLLVM, and libLTO libraries were
-  removed to save space.
-
-[Issue 1728]: https://github.com/android/ndk/issues/1728
-[Issue 1853]: https://github.com/android/ndk/issues/1853
-[Issue 1856]: https://github.com/android/ndk/issues/1856
-[Issue 1898]: https://github.com/android/ndk/issues/1898
-[Issue 1921]: https://github.com/android/ndk/issues/1921
-[Issue 1947]: https://github.com/android/ndk/issues/1947
-[Issue 1963]: https://github.com/android/ndk/issues/1963
-[Issue 1974]: https://github.com/android/ndk/issues/1974
-[Issue 1988]: https://github.com/android/ndk/issues/1988
-[Issue 1993]: https://github.com/android/ndk/issues/1993
-[Issue 1994]: https://github.com/android/ndk/issues/1994
diff --git a/docs/repo.md b/docs/repo.md
deleted file mode 100644
index d54dd65..0000000
--- a/docs/repo.md
+++ /dev/null
@@ -1,119 +0,0 @@
-# Using repo and Gerrit for the NDK
-
-Android uses [repo](https://source.android.com/setup/develop#repo) for managing
-the collections of individual git repositories needed by each "tree". See that
-page for more information, including install instructions. You'll need to
-install repo before you can work with Android repositories.
-
-## Downloading the source
-
-AOSP's [Downloading the source] explains how to initialize a new tree (similar
-to `git clone`, but for a whole repo tree). For the NDK the main difference is
-that you need to specify the master-ndk branch by adding `-b master-ndk`. See
-[Building.md](Building.md) for more specific instructions.
-
-Warning: Unlike `git clone`, `repo init` will **not** check out the source into
-a new directory. If you run `repo init && repo sync` in your home directory you
-will have a lot to clean up. If you run `repo init` in a tree that already has a
-different repo tree in it, you'll reinitialize that tree with a different branch
-and the next sync may delete existing projects.
-
-`repo init` only prepares the tree, you'll still need to run `repo sync` to
-fetch the source.
-
-[Downloading the source]: https://source.android.com/setup/build/downloading
-
-## Pulling updates
-
-The `repo` equivalent of `git pull` is `repo sync`. It's recommended that you
-run `repo sync -c -j $THREADS`. `-c` ensures that only the branches that are
-needed are fetched to avoid consuming too much disk space, and `-j $THREADS`
-allows repo to use multiple threads when updating.
-
-## Making changes
-
-`repo` trees place git projects in a detached HEAD state by default. To create a
-local branch that can be sent for review, use `repo start $BRANCH_NAME .`. This
-is similar to `git checkout -b` or `git switch -c`, but will configure the new
-branch so that it can be uploaded for review.
-
-Any time you start new work, use `repo start $BRANCH_NAME .`. Each local branch
-is its own review chain, so you'll want separate branches for distinct changes.
-
-## Committing changes
-
-When you're ready to upload your changes, commit them to your local branch.
-Gerrit will create one review per commit and will associate commits that are in
-the same branch together in a series. If you made many commits while working on
-the change, squash them (`repo rebase -i --autosquash .` will start an
-interactive rebase that you can use to re-order and combine commits) so that
-each commit is complete but minimal. Splitting large changes into multiple
-commits improves reviewability, but each commit should be able to build and pass
-tests on its own so it can be submitted individually.
-
-In most cases you'll only need one commit in a branch.
-
-## Uploading changes for review
-
-To get changes reviewed, run `repo upload --cbr .`. This will upload the commits
-in the current branch and directory. To upload changes in other directories,
-pass those paths instead of `.`. Multiple paths can be given to upload multiple
-projects with the same command. `--cbr` is "current branch". Without `--cbr` an
-interactive mode will open in your editor that you can use to upload branches
-other than the one that is currently checked out.
-
-You can also add reviewers when uploading your changes with the
-`--re=$USER1,$USER2,...` argument. If your reviewers are Googlers, Gerrit can
-*usually* work out that `name` means `name@google.com`. In other cases you'll
-need to spell out the full email address of the reviewer.
-
-Once the change is uploaded, you can open it in Gerrit and enable autosubmit if
-you want the change to be submitted once it has been reviewed and passed
-presubmit testing. Don't worry, if anyone has review comments those will prevent
-the change from being submitted before you can see them.
-
-To minimize the amount of time you spend waiting on machines, it's best to vote
-Presubmit Ready+1 on your change when you upload it to get treehugger running
-ASAP. By default treehugger won't start testing a change until it has either
-been +2'd or has autosubmit set.
-
-If you have changes that span multiple git projects that need to be submitted
-together (are mutually dependent), give them the same topic name in Gerrit. If
-you used the same branch name when running `repo start` in each project, you can
-pass `-t` to `repo upload` to automatically assign the branch name as the topic
-name in Gerrit.
-
-## Responding to review comments
-
-To update your commit to address review comments, make the change and amend the
-commit. If you have a stack of commits and need to amend something other than
-the top commit, you can use the `--fixup` argument to `git commit` and then
-rebase with `--autosquash`, or use the `edit` option on a commit after starting
-an interactive rebase (`repo rebase -i --autosquash .`).
-
-After amending your changes, run `repo upload` again as you did before. This
-will create a new "patch set" (PS) for the existing review in Gerrit. This is a
-new "version" of the commit. You do not need to specify reviewers again when
-uploading new patch sets.
-
-## Submitting
-
-Once you have approval (Code Review +2) and either a +1 or a +2 from someone in
-the OWNERS file (or are in the OWNERS file yourself), all review comments have
-been addressed and marked as "Done", and presubmit (AKA "treehugger") has
-responded with Presubmit Verified +1, your change can be submitted. If you
-enabled autosubmit this will happen automatically. Otherwise use the "Submit"
-button in the UI.
-
-## Cheat sheet for GitHub users
-
-The repo/Gerrit process has some differences compared to the GitHub Pull Request
-workflow. If you're familiar with using pull requests you should read everything
-above for the details, but here's a quick cheat sheet mapping similar processes:
-
-GitHub | Gerrit
---- | ---
-`git clone $X` | `mkdir $X && cd $X && repo init $X && repo sync`
-`git pull` | `repo sync`
-`git checkout -b`/`git switch -c` | `repo start .`
-`git push` and other pull request steps | `repo upload`
diff --git a/docs/user/common_problems.md b/docs/user/common_problems.md
index 33eee7e..4778a60 100644
--- a/docs/user/common_problems.md
+++ b/docs/user/common_problems.md
@@ -1 +1,179 @@
-Moved to https://developer.android.com/ndk/guides/common-problems
+# Common Problems and Solutions
+
+This document lists common issues that users encounter when using the NDK. It is
+by no means complete, but represents some of the most common non-bugs we see
+filed.
+
+
+## Using `_FILE_OFFSET_BITS=64` With Early API Levels
+
+Prior to [Unified Headers], the NDK did not support `_FILE_OFFSET_BITS=64`. If
+you defined it when building, it was silently ignored. With [Unified Headers]
+the `_FILE_OFFSET_BITS=64` option is now supported, but on old versions of
+Android very few of the `off_t` APIs were available as an `off64_t` variant, so
+using this feature with old API levels will result in fewer functions being
+available.
+
+This problem is explained in detail in the [r16 blog post] and in the [bionic
+documentation].
+
+[Unified Headers]: ../UnifiedHeaders.md
+[r16 blog post]: https://android-developers.googleblog.com/2017/09/introducing-android-native-development.html
+[bionic documentation]: https://android.googlesource.com/platform/bionic/+/master/docs/32-bit-abi.md
+
+**Problem**: Your build is asking for APIs that do not exist in your
+`minSdkVersion`.
+
+**Solution**: Disable `_FILE_OFFSET_BITS=64` or raise your `minSdkVersion`.
+
+### Undeclared or implicit definition of `mmap`
+
+In C++:
+
+> error: use of undeclared identifier 'mmap'
+
+In C:
+
+> warning: implicit declaration of function 'mmap' is invalid in C99
+
+Using `_FILE_OFFSET_BITS=64` instructs the C library to use `mmap64` instead of
+`mmap`. `mmap64` was not available until android-21. If your `minSdkVersion`
+value is lower than 21, the C library does not contain an `mmap` that is
+compatible with `_FILE_OFFSET_BITS=64`, so the function is unavailable.
+
+**Note**: `mmap` is only the most common manifestation of this problem. The same
+is true of any function in the C library that has an `off_t` parameter.
+
+**Note**: As of r16 beta 2, the C library exposes `mmap64` as an inline function
+to mitigate this instance of this issue.
+
+TODO: Update this section once we know what the next most common problem is.
+
+
+## Target API Set Higher Than Device API
+
+The target API level in the NDK has a very different meaning than
+`targetSdkVersion` does in Java. The NDK target API level is your app's
+**minimum** supported API level. In ndk-build, this is your `APP_PLATFORM`
+setting.
+
+Since references to functions are (typically) resolved when a library is
+loaded rather than when they are first called, you cannot reference APIs that
+are not always present and guard their use with API level checks. If they are
+referred to at all, they must be present.
+
+**Problem**: Your target API level is higher than the API supported by your
+device.
+
+**Solution**: Set your target API level (`APP_PLATFORM`) to the minimum version
+of Android your app supports.
+
+Build System         | Setting
+---------------------|-------------------
+ndk-build            | `APP_PLATFORM`
+CMake                | `ANDROID_PLATFORM`
+Standalone Toolchain | `--api`
+Gradle               | TODO: No idea
+
+### Cannot Locate `__aeabi` Symbols
+
+> UnsatisfiedLinkError: dlopen failed: cannot locate symbol "`__aeabi_memcpy`"
+
+Note that these are *runtime* errors. These errors will appear in the log when
+you attempt to load your native libraries. The symbol might be any of
+`__aeabi_*` (`__aeabi_memcpy` and `__aeabi_memclr` seem to be the most common).
+
+This problem is documented at https://github.com/android-ndk/ndk/issues/126.
+
+### Cannot Locate Symbol `rand`
+
+> UnsatisfiedLinkError: dlopen failed: cannot locate symbol "`rand`"
+
+This problem was explained very well on Stack Overflow:
+http://stackoverflow.com/a/27338365/632035
+
+There are a handful of other symbols that are also affected by this.
+TODO: Figure out what the other ones were.
+
+
+## Undefined Reference to `__atomic_*`
+
+**Problem**: Some ABIs (particularly armeabi) need libatomic to provide some
+implementations for atomic operations.
+
+**Solution**: Add `-latomic` when linking.
+
+> error: undefined reference to '`__atomic_exchange_4`'
+
+The actual symbol here might be anything prefixed with `__atomic_`.
+
+Note that ndk-build, cmake, and libc++ standalone toolchains handle this for
+you. For non libc++ standalone toolchains or a different build system, you may
+need to do this manually.
+
+
+## RTTI/Exceptions Not Working Across Library Boundaries
+
+**Problem**: Exceptions are not being caught when thrown across shared library
+boundaries, or `dynamic_cast` is failing.
+
+**Solution**: Add a [key function] to your types. A key function is the first
+non-pure, out-of-line virtual function for a type. For an example, see the
+discussion on [Issue 533].
+
+The [C++ ABI] states that two objects have the same type if and only if their
+`type_info` pointers are identical. Exceptions may only be caught if the
+`type_info` for the catch matches the thrown exception. The same rule applies
+for `dynamic_cast`.
+
+When a type does not have a key function, its typeinfo is emitted as a weak
+symbol and matching type infos are merged when libraries are loaded. When
+loading libraries dynamically after the executable has been loaded (i.e. via
+`dlopen` or `System.loadLibrary`), it may not be possible for the loader to
+merge type infos for the loaded libraries. When this happens, the two types are
+not considered equal.
+
+Note that for non-polymorphic types, the type cannot have a key function. For
+non-polymorphic types, RTTI is unnecessary, as `std::is_same` can be used to
+determine type equality at compile time.
+
+[C++ ABI]: https://itanium-cxx-abi.github.io/cxx-abi/abi.html#rtti
+[Issue 533]: https://github.com/android-ndk/ndk/issues/533#issuecomment-335977747
+[key function]: https://itanium-cxx-abi.github.io/cxx-abi/abi.html#vague-vtable
+
+
+## Using Mismatched Prebuilt Libraries
+
+Using prebuilt libraries (third-party libraries, typically) in your application
+requires a bit of extra care. In general, the following rules need to be
+followed:
+
+* The resulting app's minimum API level is the maximum of all API levels
+  targeted by all libraries.
+
+  If your target API level is android-9, but you're using a prebuilt library
+  that was built against android-16, the resulting app's minimum API level is
+  android-16. Failure to adhere to this will be visible at build time if the
+  prebuilt library is static, but may not appear until run time for prebuilt
+  shared libraries.
+
+* All libraries should be generated with the same NDK version.
+
+  This rule is a bit more flexible than most, but in general NDK code is only
+  guaranteed to be compatible with code generated with the same version of the
+  NDK (minor revision mismatches generally okay).
+
+* All libraries must use the same STL.
+
+  A library using libc++ will not interoperate with one using stlport. All
+  libraries in an application must use the same STL.
+
+  Strictly speaking this can be made to work, but it's a very fragile
+  configuration. Avoid it.
+
+* Apps with multiple shared libraries must use a shared STL.
+
+  https://developer.android.com/ndk/guides/cpp-support.html#sr
+
+  As with mismatched STLs, the problems caused by this can be avoided if great
+  care is taken, but it's better to just avoid the problem.
diff --git a/docs/user/middleware_vendors.md b/docs/user/middleware_vendors.md
index f3a01b5..215403c 100644
--- a/docs/user/middleware_vendors.md
+++ b/docs/user/middleware_vendors.md
@@ -1 +1,92 @@
-Moved to https://developer.android.com/ndk/guides/middleware-vendors
+# Advice for Middleware Vendors
+
+Distributing middleware built with the NDK imposes some additional problems that
+app developers do not need to worry about. Prebuilt libraries impose some of
+their implementation choices on their users.
+
+## Choosing API levels and NDK versions
+
+Your users cannot use a `minSdkVersion` lower than yours. If your users' apps
+need to run on Android 16, you cannot build for Android 21.
+
+NDK versions are largely compatible with each other, but occasionally there are
+changes that break compatibility. If you know that all of your users are using
+the same version of the NDK, it's best to use the same version that they do.
+Otherwise, use the newest version.
+
+## Using the STL
+
+If you're writing C++ and using the STL, your choice between libc++_shared and
+libc++_static affects your users if you distribute a shared library. If you
+distribute a shared library, you must either use libc++_shared or ensure that
+libc++'s symbols are not exposed by your library. The best way to do this is to
+explicitly declare your ABI surface with a version script (this also helps keep
+your implementation details private). For example, a simple arithmetic library
+might have the following version script:
+
+Note: If you distribute a static library, it does not matter whether you choose
+a static or shared STL because nothing is linked in a static library. The user
+can link whichever they choose in their application. They must link *something*,
+even for C-only consumers, so be sure to document that it is required and which
+version of the NDK was used to build in case of incompatibility in STL versions.
+
+```txt
+LIBMYMATH {
+global:
+    add;
+    sub;
+    mul;
+    div;
+    # C++ symbols in an extern block will be mangled automatically. See
+    # https://stackoverflow.com/a/21845178/632035 for more examples.
+    extern "C++" {
+        "pow(int, int)";
+    }
+local:
+    *;
+};
+```
+
+A version script should be the preferred option because it is the most robust
+way to control symbol visibility. Another, less robust option is to use
+`-Wl,--exclude-libs,libc++_static.a -Wl,--exclude-libs,libc++abi.a` when
+linking. This is less robust because it will only hide the symbols in the
+libraries that are explicitly named, and no diagnostics are reported for
+libraries that are not used (a typo in the library name is not an error, and the
+burden is on the user to keep the library list up to date).
+
+## For Java Middleware with JNI Libraries
+
+Java libraries that include JNI libraries (i.e. use `jniLibs`) need to be
+careful that the JNI libraries they include will not collide with other
+libraries in the user's app. For example, if the AAR includes
+`libc++_shared.so`, but a different version of `libc++_shared.so` than the app
+uses, only one will be installed to the APK and that may lead to unreliable
+behavior.
+
+Warning: [Bug 141758241]: The Android Gradle Plugin does not currently diagnose
+this error condition. One of the identically named libraries will be arbitrarily
+chosen for packaging in the APK.
+
+[Bug 141758241]: https://issuetracker.google.com/141758241
+
+The most reliable solution is for Java libraries to include no more than **one**
+JNI library. All dependencies including the STL should be statically linked into
+the implementation library, and a version script should be used to enforce the
+ABI surface. For example, a Java library com.example.foo that includes the JNI
+library libfooimpl.so should use the following version script:
+
+```txt
+LIBFOOIMPL {
+global:
+    JNI_OnLoad;
+local:
+    *;
+};
+```
+
+Note that this example uses `registerNatives` via `JNI_OnLoad` as described in
+[JNI Tips] to ensure that the minimal ABI surface is exposed and library load
+time is minimized.
+
+[JNI Tips]: https://developer.android.com/training/articles/perf-jni#native-libraries
diff --git a/infra/docker/Dockerfile b/infra/docker/Dockerfile
index ee3711b..8afd5ab 100644
--- a/infra/docker/Dockerfile
+++ b/infra/docker/Dockerfile
@@ -5,6 +5,7 @@
 RUN apt-get install -y bison
 RUN apt-get install -y build-essential
 RUN apt-get install -y curl
+RUN apt-get install -y dos2unix
 RUN apt-get install -y flex
 RUN apt-get install -y git
 RUN apt-get install -y make
@@ -21,4 +22,4 @@
     https://commondatastorage.googleapis.com/git-repo-downloads/repo
 RUN chmod a+x /usr/bin/repo
 
-CMD ["/usr/bin/env bash"]
+CMD ["/bin/bash"]
diff --git a/meta/abis.json b/meta/abis.json
index 665a768..2c43a49 100644
--- a/meta/abis.json
+++ b/meta/abis.json
@@ -2,46 +2,21 @@
   "armeabi-v7a": {
     "bitness": 32,
     "default": true,
-    "deprecated": false,
-    "proc": "armv7-a",
-    "arch": "arm",
-    "triple": "arm-linux-androideabi",
-    "llvm_triple": "armv7-none-linux-androideabi"
+    "deprecated": false
   },
   "arm64-v8a": {
     "bitness": 64,
     "default": true,
-    "deprecated": false,
-    "proc": "aarch64",
-    "arch": "arm64",
-    "triple": "aarch64-linux-android",
-    "llvm_triple": "aarch64-none-linux-android"
-  },
-  "riscv64": {
-    "bitness": 64,
-    "default": false,
-    "deprecated": false,
-    "proc": "riscv64",
-    "arch": "riscv64",
-    "triple": "riscv64-linux-android",
-    "llvm_triple": "riscv64-none-linux-android"
+    "deprecated": false
   },
   "x86": {
     "bitness": 32,
     "default": true,
-    "deprecated": false,
-    "proc": "i686",
-    "arch": "x86",
-    "triple": "i686-linux-android",
-    "llvm_triple": "i686-none-linux-android"
+    "deprecated": false
   },
   "x86_64": {
     "bitness": 64,
     "default": true,
-    "deprecated": false,
-    "proc": "x86_64",
-    "arch": "x86_64",
-    "triple": "x86_64-linux-android",
-    "llvm_triple": "x86_64-none-linux-android"
+    "deprecated": false
   }
 }
diff --git a/meta/platforms.json b/meta/platforms.json
index 498afa0..2afb441 100644
--- a/meta/platforms.json
+++ b/meta/platforms.json
@@ -1,6 +1,6 @@
 {
-  "min": 21,
-  "max": 35,
+  "min": 16,
+  "max": 29,
   "aliases": {
     "20": 19,
     "25": 24,
@@ -16,12 +16,6 @@
     "O": 26,
     "O-MR1": 27,
     "P": 28,
-    "Q": 29,
-    "R": 30,
-    "S": 31,
-    "Sv2": 32,
-    "Tiramisu": 33,
-    "UpsideDownCake": 34,
-    "VanillaIceCream": 35
+    "Q": 29
   }
 }
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000..ba09da5
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,8 @@
+[mypy]
+python_version = 3.6
+
+[mypy-ndk.*]
+disallow_untyped_defs = True
+
+[mypy-adb]
+ignore_missing_imports = True
diff --git a/navbar.md b/navbar.md
index 6c87cb6..1662a75 100644
--- a/navbar.md
+++ b/navbar.md
@@ -1,5 +1,4 @@
 * [Home](/README.md)
-* [Onboarding](/docs/Onboarding.md)
 * [Building](/docs/Building.md)
 * [Clang Migration](/docs/ClangMigration.md)
 * [Testing](/docs/Testing.md)
diff --git a/ndk-gdb b/ndk-gdb
index 7f84e0a..567a51c 100755
--- a/ndk-gdb
+++ b/ndk-gdb
@@ -1,5 +1,3 @@
-#!/usr/bin/env bash
-THIS_DIR=$(cd "$(dirname "$0")" && pwd)
-ANDROID_NDK_ROOT=$(cd "$THIS_DIR/../../.." && pwd)
-. "$ANDROID_NDK_ROOT/build/tools/ndk_bin_common.sh"
-"$ANDROID_NDK_PYTHON" "$THIS_DIR/ndk-gdb.py" "$@"
+#!/bin/sh
+NDK_BIN_DIR=$(dirname "$0")
+"${NDK_BIN_DIR}"/python "${NDK_BIN_DIR}"/ndk-gdb.py "$@"
diff --git a/ndk-gdb.cmd b/ndk-gdb.cmd
index eaa909b..9f81511 100755
--- a/ndk-gdb.cmd
+++ b/ndk-gdb.cmd
@@ -1,5 +1,6 @@
 @echo off
 setlocal
-set ANDROID_NDK_PYTHON=%~dp0..\..\..\toolchains\llvm\prebuilt\windows-x86_64\python3\python.exe
+set PREBUILT_BIN=%~dp0
 set SHELL=cmd
-"%ANDROID_NDK_PYTHON%" -u "%~dp0ndk-gdb.py" %*
+"%PREBUILT_BIN%/python.exe" -u "%~dp0ndk-gdb.py" %*
+endlocal
diff --git a/ndk-gdb.py b/ndk-gdb.py
new file mode 100755
index 0000000..dbf1e0d
--- /dev/null
+++ b/ndk-gdb.py
@@ -0,0 +1,708 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from __future__ import print_function
+
+import argparse
+import contextlib
+import os
+import operator
+import posixpath
+import signal
+import subprocess
+import sys
+import time
+import xml.etree.cElementTree as ElementTree
+
+import logging
+
+# Shared functions across gdbclient.py and ndk-gdb.py.
+# ndk-gdb is installed to $NDK/prebuilt/<platform>/bin
+NDK_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..'))
+sys.path.append(os.path.join(NDK_PATH, "python-packages"))
+import adb
+import gdbrunner
+
+
+def log(msg):
+    logger = logging.getLogger(__name__)
+    logger.info(msg)
+
+
+def enable_verbose_logging():
+    logger = logging.getLogger(__name__)
+    handler = logging.StreamHandler(sys.stdout)
+    formatter = logging.Formatter()
+
+    handler.setFormatter(formatter)
+    logger.addHandler(handler)
+    logger.propagate = False
+
+    logger.setLevel(logging.INFO)
+
+
+def error(msg):
+    sys.exit("ERROR: {}".format(msg))
+
+
+class ArgumentParser(gdbrunner.ArgumentParser):
+    def __init__(self):
+        super(ArgumentParser, self).__init__()
+        self.add_argument(
+            "--verbose", "-v", action="store_true",
+            help="enable verbose mode")
+
+        self.add_argument(
+            "--force", "-f", action="store_true",
+            help="kill existing debug session if it exists")
+
+        self.add_argument(
+            "--port", type=int, nargs="?", default="5039",
+            help="override the port used on the host")
+
+        self.add_argument(
+            "--delay", type=float, default=0.25,
+            help="delay in seconds to wait after starting activity.\n"
+                 "defaults to 0.25, higher values may be needed on slower devices.")
+
+        self.add_argument(
+            "-p", "--project", dest="project",
+            help="specify application project path")
+
+        app_group = self.add_argument_group("target selection")
+        start_group = app_group.add_mutually_exclusive_group()
+
+        start_group.add_argument(
+            "--attach", nargs='?', dest="package_name", metavar="PKG_NAME",
+            help="attach to application (default)\n"
+                 "autodetects PKG_NAME if not specified")
+
+        # NB: args.launch can be False (--attach), None (--launch), or a string
+        start_group.add_argument(
+            "--launch", nargs='?', dest="launch", default=False,
+            metavar="ACTIVITY",
+            help="launch application activity\n"
+                 "launches main activity if ACTIVITY not specified")
+
+        start_group.add_argument(
+            "--launch-list", action="store_true",
+            help="list all launchable activity names from manifest")
+
+        debug_group = self.add_argument_group("debugging options")
+        debug_group.add_argument(
+            "-x", "--exec", dest="exec_file",
+            help="execute gdb commands in EXEC_FILE after connection")
+
+        debug_group.add_argument(
+            "--nowait", action="store_true",
+            help="do not wait for debugger to attach (may miss early JNI "
+                 "breakpoints)")
+
+        if sys.platform.startswith("win"):
+            tui_help = argparse.SUPPRESS
+        else:
+            tui_help = "use GDB's tui mode"
+
+        debug_group.add_argument(
+            "-t", "--tui", action="store_true", dest="tui",
+            help=tui_help)
+
+
+def extract_package_name(xmlroot):
+    if "package" in xmlroot.attrib:
+        return xmlroot.attrib["package"]
+    error("Failed to find package name in AndroidManifest.xml")
+
+
+ANDROID_XMLNS = "{http://schemas.android.com/apk/res/android}"
+def extract_launchable(xmlroot):
+    '''
+    A given application can have several activities, and each activity
+    can have several intent filters. We want to only list, in the final
+    output, the activities which have an intent-filter that contains the
+    following elements:
+
+      <action android:name="android.intent.action.MAIN" />
+      <category android:name="android.intent.category.LAUNCHER" />
+    '''
+    launchable_activities = []
+    application = xmlroot.findall("application")[0]
+
+    main_action = "android.intent.action.MAIN"
+    launcher_category = "android.intent.category.LAUNCHER"
+    name_attrib = "{}name".format(ANDROID_XMLNS)
+
+    for activity in application.iter("activity"):
+        if name_attrib not in activity.attrib:
+            continue
+
+        for intent_filter in activity.iter("intent-filter"):
+            found_action = False
+            found_category = False
+            for child in intent_filter:
+                if child.tag == "action":
+                    if not found_action and name_attrib in child.attrib:
+                        if child.attrib[name_attrib] == main_action:
+                            found_action = True
+                if child.tag == "category":
+                    if not found_category and name_attrib in child.attrib:
+                        if child.attrib[name_attrib] == launcher_category:
+                            found_category = True
+            if found_action and found_category:
+                launchable_activities.append(activity.attrib[name_attrib])
+    return launchable_activities
+
+
+def ndk_bin_path():
+    return os.path.dirname(os.path.realpath(__file__))
+
+
+def handle_args():
+    def find_program(program, paths):
+        '''Find a binary in paths'''
+        exts = [""]
+        if sys.platform.startswith("win"):
+            exts += [".exe", ".bat", ".cmd"]
+        for path in paths:
+            if os.path.isdir(path):
+                for ext in exts:
+                    full = path + os.sep + program + ext
+                    if os.path.isfile(full):
+                        return full
+        return None
+
+    # FIXME: This is broken for PATH that contains quoted colons.
+    paths = os.environ["PATH"].replace('"', '').split(os.pathsep)
+
+    args = ArgumentParser().parse_args()
+
+    if args.tui and sys.platform.startswith("win"):
+        error("TUI is unsupported on Windows.")
+
+    ndk_bin = ndk_bin_path()
+    args.make_cmd = find_program("make", [ndk_bin])
+    args.jdb_cmd = find_program("jdb", paths)
+    if args.make_cmd is None:
+        error("Failed to find make in '{}'".format(ndk_bin))
+    if args.jdb_cmd is None:
+        print("WARNING: Failed to find jdb on your path, defaulting to "
+              "--nowait")
+        args.nowait = True
+
+    if args.verbose:
+        enable_verbose_logging()
+
+    return args
+
+
+def find_project(args):
+    manifest_name = "AndroidManifest.xml"
+    if args.project is not None:
+        log("Using project directory: {}".format(args.project))
+        args.project = os.path.realpath(os.path.expanduser(args.project))
+        if not os.path.exists(os.path.join(args.project, manifest_name)):
+            msg = "could not find AndroidManifest.xml in '{}'"
+            error(msg.format(args.project))
+    else:
+        # Walk upwards until we find AndroidManifest.xml, or run out of path.
+        current_dir = os.getcwdu()
+        while not os.path.exists(os.path.join(current_dir, manifest_name)):
+            parent_dir = os.path.dirname(current_dir)
+            if parent_dir == current_dir:
+                error("Could not find AndroidManifest.xml in current"
+                      " directory or a parent directory.\n"
+                      "       Launch this script from inside a project, or"
+                      " use --project=<path>.")
+            current_dir = parent_dir
+        args.project = current_dir
+        log("Using project directory: {} ".format(args.project))
+    args.manifest_path = os.path.join(args.project, manifest_name)
+    return args.project
+
+
+def canonicalize_activity(package_name, activity_name):
+    if activity_name.startswith("."):
+        return "{}{}".format(package_name, activity_name)
+    return activity_name
+
+
+def parse_manifest(args):
+    manifest = ElementTree.parse(args.manifest_path)
+    manifest_root = manifest.getroot()
+    package_name = extract_package_name(manifest_root)
+    log("Found package name: {}".format(package_name))
+
+    activities = extract_launchable(manifest_root)
+    activities = [canonicalize_activity(package_name, a) for a in activities]
+
+    if args.launch_list:
+        print("Launchable activities: {}".format(", ".join(activities)))
+        sys.exit(0)
+
+    args.activities = activities
+    args.package_name = package_name
+
+
+def select_target(args):
+    assert args.launch != False
+
+    if len(args.activities) == 0:
+        error("No launchable activities found.")
+
+    if args.launch is None:
+        target = args.activities[0]
+
+        if len(args.activities) > 1:
+            print("WARNING: Multiple launchable activities found, choosing"
+                  " '{}'.".format(args.activities[0]))
+    else:
+        activity_name = canonicalize_activity(args.package_name, args.launch)
+
+        if activity_name not in args.activities:
+            msg = "Could not find launchable activity: '{}'."
+            error(msg.format(activity_name))
+        target = activity_name
+    return target
+
+
+@contextlib.contextmanager
+def cd(path):
+    curdir = os.getcwd()
+    os.chdir(path)
+    os.environ["PWD"] = path
+    try:
+        yield
+    finally:
+        os.environ["PWD"] = curdir
+        os.chdir(curdir)
+
+
+def dump_var(args, variable, abi=None):
+    make_args = [args.make_cmd, "--no-print-dir", "-f",
+                 os.path.join(NDK_PATH, "build/core/build-local.mk"),
+                 "-C", args.project, "DUMP_{}".format(variable)]
+
+    if abi is not None:
+        make_args.append("APP_ABI={}".format(abi))
+
+    with cd(args.project):
+        try:
+            make_output = subprocess.check_output(make_args, cwd=args.project)
+        except subprocess.CalledProcessError:
+            error("Failed to retrieve application ABI from Android.mk.")
+    return make_output.splitlines()[-1]
+
+
+def get_api_level(device):
+    # Check the device API level
+    try:
+        api_level = int(device.get_prop("ro.build.version.sdk"))
+    except (TypeError, ValueError):
+        error("Failed to find target device's supported API level.\n"
+              "ndk-gdb only supports devices running Android 2.2 or higher.")
+    if api_level < 8:
+        error("ndk-gdb only supports devices running Android 2.2 or higher.\n"
+              "(expected API level 8, actual: {})".format(api_level))
+
+    return api_level
+
+
+def fetch_abi(args):
+    '''
+    Figure out the intersection of which ABIs the application is built for and
+    which ones the device supports, then pick the one preferred by the device,
+    so that we know which gdbserver to push and run on the device.
+    '''
+
+    app_abis = dump_var(args, "APP_ABI").split(" ")
+    if "all" in app_abis:
+        app_abis = dump_var(args, "NDK_ALL_ABIS").split(" ")
+    app_abis_msg = "Application ABIs: {}".format(", ".join(app_abis))
+    log(app_abis_msg)
+
+    new_abi_props = ["ro.product.cpu.abilist"]
+    old_abi_props = ["ro.product.cpu.abi", "ro.product.cpu.abi2"]
+    abi_props = new_abi_props
+    if args.device.get_prop("ro.product.cpu.abilist") is None:
+        abi_props = old_abi_props
+
+    device_abis = []
+    for key in abi_props:
+        value = args.device.get_prop(key)
+        if value is not None:
+            device_abis.extend(value.split(","))
+
+    device_abis_msg = "Device ABIs: {}".format(", ".join(device_abis))
+    log(device_abis_msg)
+
+    for abi in device_abis:
+        if abi in app_abis:
+            # TODO(jmgao): Do we expect gdb to work with ARM-x86 translation?
+            log("Selecting ABI: {}".format(abi))
+            return abi
+
+    msg = "Application cannot run on the selected device."
+
+    # Don't repeat ourselves.
+    if not args.verbose:
+        msg += "\n{}\n{}".format(app_abis_msg, device_abis_msg)
+
+    error(msg)
+
+
+def get_run_as_cmd(user, cmd):
+    return ["run-as", user] + cmd
+
+
+def get_app_data_dir(args, package_name):
+    cmd = ["/system/bin/sh", "-c", "pwd", "2>/dev/null"]
+    cmd = get_run_as_cmd(package_name, cmd)
+    (rc, stdout, _) = args.device.shell_nocheck(cmd)
+    if rc != 0:
+        error("Could not find application's data directory. Are you sure that "
+              "the application is installed and debuggable?")
+    data_dir = stdout.strip()
+
+    # Applications with minSdkVersion >= 24 will have their data directories
+    # created with rwx------ permissions, preventing adbd from forwarding to
+    # the gdbserver socket. To be safe, if we're on a device >= 24, always
+    # chmod the directory.
+    if get_api_level(args.device) >= 24:
+        chmod_cmd = ["/system/bin/chmod", "a+x", data_dir]
+        chmod_cmd = get_run_as_cmd(package_name, chmod_cmd)
+        (rc, _, _) = args.device.shell_nocheck(chmod_cmd)
+        if rc != 0:
+            error("Failed to make application data directory world executable")
+
+    log("Found application data directory: {}".format(data_dir))
+    return data_dir
+
+
+def abi_to_arch(abi):
+    if abi.startswith("armeabi"):
+        return "arm"
+    elif abi == "arm64-v8a":
+        return "arm64"
+    else:
+        return abi
+
+
+def get_gdbserver_path(args, package_name, app_data_dir, arch):
+    app_gdbserver_path = "{}/lib/gdbserver".format(app_data_dir)
+    cmd = ["ls", app_gdbserver_path, "2>/dev/null"]
+    cmd = get_run_as_cmd(package_name, cmd)
+    (rc, _, _) = args.device.shell_nocheck(cmd)
+    if rc == 0:
+        log("Found app gdbserver: {}".format(app_gdbserver_path))
+        return app_gdbserver_path
+
+    # We need to upload our gdbserver
+    log("App gdbserver not found at {}, uploading.".format(app_gdbserver_path))
+    local_path = "{}/prebuilt/android-{}/gdbserver/gdbserver"
+    local_path = local_path.format(NDK_PATH, arch)
+    remote_path = "/data/local/tmp/{}-gdbserver".format(arch)
+    args.device.push(local_path, remote_path)
+
+    # Copy gdbserver into the data directory on M+, because selinux prevents
+    # execution of binaries directly from /data/local/tmp.
+    if get_api_level(args.device) >= 23:
+        destination = "{}/{}-gdbserver".format(app_data_dir, arch)
+        log("Copying gdbserver to {}.".format(destination))
+        cmd = ["cat", remote_path, "|", "run-as", package_name,
+               "sh", "-c", "'cat > {}'".format(destination)]
+        (rc, _, _) = args.device.shell_nocheck(cmd)
+        if rc != 0:
+            error("Failed to copy gdbserver to {}.".format(destination))
+        (rc, _, _) = args.device.shell_nocheck(["run-as", package_name,
+                                                "chmod", "700", destination])
+        if rc != 0:
+            error("Failed to chmod gdbserver at {}.".format(destination))
+
+        remote_path = destination
+
+    log("Uploaded gdbserver to {}".format(remote_path))
+    return remote_path
+
+
+def pull_binaries(device, out_dir, app_64bit):
+    required_files = []
+    libraries = ["libc.so", "libm.so", "libdl.so"]
+
+    if app_64bit:
+        required_files = ["/system/bin/app_process64", "/system/bin/linker64"]
+        library_path = "/system/lib64"
+    else:
+        required_files = ["/system/bin/linker"]
+        library_path = "/system/lib"
+
+    for library in libraries:
+        required_files.append(posixpath.join(library_path, library))
+
+    for required_file in required_files:
+        # os.path.join is not used: joining two absolute paths discards all but the last
+        local_path = os.path.realpath(out_dir + required_file)
+        local_dirname = os.path.dirname(local_path)
+        if not os.path.isdir(local_dirname):
+            os.makedirs(local_dirname)
+        log("Pulling '{}' to '{}'".format(required_file, local_path))
+        device.pull(required_file, local_path)
+
+    # /system/bin/app_process is 32-bit on 32-bit devices, but a symlink to
+    # app_process64 on 64-bit. If we need the 32-bit version, try to pull
+    # app_process32, and if that fails, pull app_process.
+    if not app_64bit:
+        destination = os.path.realpath(out_dir + "/system/bin/app_process")
+        try:
+            device.pull("/system/bin/app_process32", destination)
+        except:
+            device.pull("/system/bin/app_process", destination)
+
+def generate_gdb_script(args, sysroot, binary_path, app_64bit, jdb_pid, connect_timeout=5):
+    if sys.platform.startswith("win"):
+        # GDB expects paths to use forward slashes.
+        sysroot = sysroot.replace("\\", "/")
+        binary_path = binary_path.replace("\\", "/")
+
+    gdb_commands = "set osabi GNU/Linux\n"
+    gdb_commands += "file '{}'\n".format(binary_path)
+
+    solib_search_path = [sysroot, "{}/system/bin".format(sysroot)]
+    if app_64bit:
+        solib_search_path.append("{}/system/lib64".format(sysroot))
+    else:
+        solib_search_path.append("{}/system/lib".format(sysroot))
+    solib_search_path = os.pathsep.join(solib_search_path)
+    gdb_commands += "set solib-absolute-prefix {}\n".format(sysroot)
+    gdb_commands += "set solib-search-path {}\n".format(solib_search_path)
+
+    # Try to connect for a few seconds, sometimes the device gdbserver takes
+    # a little bit to come up, especially on emulators.
+    gdb_commands += """
+python
+
+def target_remote_with_retry(target, timeout_seconds):
+  import time
+  end_time = time.time() + timeout_seconds
+  while True:
+    try:
+      gdb.execute('target remote ' + target)
+      return True
+    except gdb.error as e:
+      time_left = end_time - time.time()
+      if time_left < 0 or time_left > timeout_seconds:
+        print("Error: unable to connect to device.")
+        print(e)
+        return False
+      time.sleep(min(0.25, time_left))
+
+target_remote_with_retry(':{}', {})
+
+end
+""".format(args.port, connect_timeout)
+
+    if jdb_pid is not None:
+        # After we've interrupted the app, reinvoke ndk-gdb.py to start jdb and
+        # wake up the app.
+        gdb_commands += """
+python
+def start_jdb_to_unblock_app():
+  import subprocess
+  subprocess.Popen({})
+start_jdb_to_unblock_app()
+end
+    """.format(repr(
+            [
+                sys.executable,
+                os.path.realpath(__file__),
+                "--internal-wakeup-pid-with-jdb",
+                args.device.adb_path,
+                args.device.serial,
+                args.jdb_cmd,
+                str(jdb_pid),
+                str(bool(args.verbose)),
+            ]))
+
+    if args.exec_file is not None:
+        try:
+            exec_file = open(args.exec_file, "r")
+        except IOError:
+            error("Failed to open GDB exec file: '{}'.".format(args.exec_file))
+
+        with exec_file:
+            gdb_commands += exec_file.read()
+
+    return gdb_commands
+
+
+def start_jdb(adb_path, serial, jdb_cmd, pid, verbose):
+    pid = int(pid)
+    device = adb.get_device(serial, adb_path=adb_path)
+    if verbose == "True":
+        enable_verbose_logging()
+
+    log("Starting jdb to unblock application.")
+
+    # Do setup stuff to keep ^C in the parent from killing us.
+    signal.signal(signal.SIGINT, signal.SIG_IGN)
+    windows = sys.platform.startswith("win")
+    if not windows:
+        os.setpgrp()
+
+    jdb_port = 65534
+    device.forward("tcp:{}".format(jdb_port), "jdwp:{}".format(pid))
+    jdb_cmd = [jdb_cmd, "-connect",
+               "com.sun.jdi.SocketAttach:hostname=localhost,port={}".format(jdb_port)]
+
+    flags = subprocess.CREATE_NEW_PROCESS_GROUP if windows else 0
+    jdb = subprocess.Popen(jdb_cmd,
+                           stdin=subprocess.PIPE,
+                           stdout=subprocess.PIPE,
+                           stderr=subprocess.STDOUT,
+                           creationflags=flags)
+
+    # Wait until jdb can communicate with the app. Once it can, the app will
+    # start polling for a Java debugger (e.g. every 200ms). We need to wait
+    # a little longer after that so that the app notices jdb.
+    jdb_magic = "__verify_jdb_has_started__"
+    jdb.stdin.write('print "{}"\n'.format(jdb_magic))
+    saw_magic_str = False
+    while True:
+        line = jdb.stdout.readline()
+        if line == "":
+            break
+        log("jdb output: " + line.rstrip())
+        if jdb_magic in line and not saw_magic_str:
+            saw_magic_str = True
+            time.sleep(0.3)
+            jdb.stdin.write("exit\n")
+    jdb.wait()
+    if saw_magic_str:
+        log("JDB finished unblocking application.")
+    else:
+        log("error: did not find magic string in JDB output.")
+
+
+def main():
+    if sys.argv[1:2] == ["--internal-wakeup-pid-with-jdb"]:
+        return start_jdb(*sys.argv[2:])
+
+    args = handle_args()
+    device = args.device
+
+    if device is None:
+        error("Could not find a unique connected device/emulator.")
+
+    # Warn on old Pixel C firmware (b/29381985). Newer devices may have Yama
+    # enabled but still work with ndk-gdb (b/19277529).
+    yama_check = device.shell_nocheck(["cat", "/proc/sys/kernel/yama/ptrace_scope", "2>/dev/null"])
+    if (yama_check[0] == 0 and yama_check[1].rstrip() not in ["", "0"] and
+            (device.get_prop("ro.build.product"), device.get_prop("ro.product.name")) == ("dragon", "ryu")):
+        print("WARNING: The device uses Yama ptrace_scope to restrict debugging. ndk-gdb will")
+        print("    likely be unable to attach to a process. With root access, the restriction")
+        print("    can be lifted by writing 0 to /proc/sys/kernel/yama/ptrace_scope. Consider")
+        print("    upgrading your Pixel C to MXC89L or newer, where Yama is disabled.")
+
+    adb_version = subprocess.check_output(device.adb_cmd + ["version"])
+    log("ADB command used: '{}'".format(" ".join(device.adb_cmd)))
+    log("ADB version: {}".format(" ".join(adb_version.splitlines())))
+
+    project = find_project(args)
+    if args.package_name:
+        log("Attaching to specified package: {}".format(args.package_name))
+    else:
+        parse_manifest(args)
+
+    pkg_name = args.package_name
+
+    if args.launch is False:
+        log("Attaching to existing application process.")
+    else:
+        args.launch = select_target(args)
+        log("Selected target activity: '{}'".format(args.launch))
+
+    abi = fetch_abi(args)
+
+    out_dir = os.path.join(project, (dump_var(args, "TARGET_OUT", abi)))
+    out_dir = os.path.realpath(out_dir)
+
+    app_data_dir = get_app_data_dir(args, pkg_name)
+    arch = abi_to_arch(abi)
+    gdbserver_path = get_gdbserver_path(args, pkg_name, app_data_dir, arch)
+
+    # Kill the process and gdbserver if requested.
+    if args.force:
+        kill_pids = gdbrunner.get_pids(device, gdbserver_path)
+        if args.launch:
+            kill_pids += gdbrunner.get_pids(device, pkg_name)
+        kill_pids = map(str, kill_pids)
+        if kill_pids:
+            log("Killing processes: {}".format(", ".join(kill_pids)))
+            device.shell_nocheck(["run-as", pkg_name, "kill", "-9"] + kill_pids)
+
+    # Launch the application if needed, and get its pid
+    if args.launch:
+        am_cmd = ["am", "start"]
+        if not args.nowait:
+            am_cmd.append("-D")
+        component_name = "{}/{}".format(pkg_name, args.launch)
+        am_cmd.append(component_name)
+        log("Launching activity {}...".format(component_name))
+        (rc, _, _) = device.shell_nocheck(am_cmd)
+        if rc != 0:
+            error("Failed to start {}".format(component_name))
+
+        if args.delay > 0.0:
+            log("Sleeping for {} seconds.".format(args.delay))
+            time.sleep(args.delay)
+
+    pids = gdbrunner.get_pids(device, pkg_name)
+    if len(pids) == 0:
+        error("Failed to find running process '{}'".format(pkg_name))
+    if len(pids) > 1:
+        error("Multiple running processes named '{}'".format(pkg_name))
+    pid = pids[0]
+
+    # Pull the linker, zygote, and notable system libraries
+    app_64bit = "64" in abi
+    pull_binaries(device, out_dir, app_64bit)
+    if app_64bit:
+        zygote_path = os.path.join(out_dir, "system", "bin", "app_process64")
+    else:
+        zygote_path = os.path.join(out_dir, "system", "bin", "app_process")
+
+    # Start gdbserver.
+    debug_socket = posixpath.join(app_data_dir, "debug_socket")
+    log("Starting gdbserver...")
+    gdbrunner.start_gdbserver(
+        device, None, gdbserver_path,
+        target_pid=pid, run_cmd=None, debug_socket=debug_socket,
+        port=args.port, run_as_cmd=["run-as", pkg_name])
+
+    gdb_path = os.path.join(ndk_bin_path(), "gdb")
+
+    # Start jdb to unblock the application if necessary.
+    jdb_pid = pid if (args.launch and not args.nowait) else None
+
+    # Start gdb.
+    gdb_commands = generate_gdb_script(args, out_dir, zygote_path, app_64bit, jdb_pid)
+    gdb_flags = []
+    if args.tui:
+        gdb_flags.append("--tui")
+    gdbrunner.start_gdb(gdb_path, gdb_commands, gdb_flags)
+
+if __name__ == "__main__":
+    main()
diff --git a/ndk-stack b/ndk-stack
index 21eeef7..57d58b6 100755
--- a/ndk-stack
+++ b/ndk-stack
@@ -1,5 +1,3 @@
-#!/usr/bin/env bash
-THIS_DIR=$(cd "$(dirname "$0")" && pwd)
-ANDROID_NDK_ROOT=$(cd "$THIS_DIR/../../.." && pwd)
-. "$ANDROID_NDK_ROOT/build/tools/ndk_bin_common.sh"
-"$ANDROID_NDK_PYTHON" "$THIS_DIR/ndk-stack.py" "$@"
+#!/bin/sh
+NDK_BIN_DIR=$(dirname "$0")
+"${NDK_BIN_DIR}"/python "${NDK_BIN_DIR}"/ndk-stack.py "$@"
diff --git a/ndk-stack.cmd b/ndk-stack.cmd
index 3a2446e..7848900 100755
--- a/ndk-stack.cmd
+++ b/ndk-stack.cmd
@@ -1,6 +1,6 @@
 @echo off
 setlocal
-set ANDROID_NDK_PYTHON=%~dp0..\..\..\toolchains\llvm\prebuilt\windows-x86_64\python3\python.exe
+set PREBUILT_BIN=%~dp0
 set SHELL=cmd
-"%ANDROID_NDK_PYTHON%" -u "%~dp0ndk-stack.py" %*
+"%PREBUILT_BIN%/python.exe" -u "%~dp0ndk-stack.py" %*
 endlocal
diff --git a/ndk-stack.py b/ndk-stack.py
new file mode 100644
index 0000000..d4abcd1
--- /dev/null
+++ b/ndk-stack.py
@@ -0,0 +1,418 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Symbolizes stack traces from logcat.
+See https://developer.android.com/ndk/guides/ndk-stack for more information.
+"""
+
+from __future__ import print_function
+
+import argparse
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+EXE_SUFFIX = '.exe' if os.name == 'nt' else ''
+
+
+class TmpDir(object):
+    """Manage temporary directory creation."""
+
+    def __init__(self):
+        self._tmp_dir = None
+
+    def delete(self):
+        if self._tmp_dir:
+            shutil.rmtree(self._tmp_dir)
+
+    def get_directory(self):
+        if not self._tmp_dir:
+            self._tmp_dir = tempfile.mkdtemp()
+        return self._tmp_dir
+
+
+def get_ndk_paths():
+    """Parse and find all of the paths of the NDK.
+
+    Returns: Three values:
+             Full path to the root of the ndk install.
+             Full path to the ndk bin directory where this executable lives.
+             The platform name (eg linux-x86_64).
+    """
+
+    # ndk-stack is installed to $NDK/prebuilt/<platform>/bin, so from
+    # `android-ndk-r18/prebuilt/linux-x86_64/bin/ndk-stack`...
+    # ...get `android-ndk-r18/`:
+    ndk_bin = os.path.dirname(os.path.realpath(__file__))
+    ndk_root = os.path.abspath(os.path.join(ndk_bin, '../../..'))
+    # ...get `linux-x86_64`:
+    ndk_host_tag = os.path.basename(
+        os.path.abspath(os.path.join(ndk_bin, '../')))
+    return (ndk_root, ndk_bin, ndk_host_tag)
+
+
+def find_llvm_symbolizer(ndk_root, ndk_bin, ndk_host_tag):
+    """Finds the NDK llvm-symbolizer(1) binary.
+
+    Returns: An absolute path to llvm-symbolizer(1).
+    """
+
+    llvm_symbolizer = 'llvm-symbolizer' + EXE_SUFFIX
+    path = os.path.join(ndk_root, 'toolchains', 'llvm', 'prebuilt',
+                        ndk_host_tag, 'bin', llvm_symbolizer)
+    if os.path.exists(path):
+        return path
+
+    # Okay, maybe we're a standalone toolchain? (https://github.com/android-ndk/ndk/issues/931)
+    # In that case, llvm-symbolizer and ndk-stack are conveniently in
+    # the same directory...
+    path = os.path.abspath(os.path.join(ndk_bin, llvm_symbolizer))
+    if os.path.exists(path):
+        return path
+    raise OSError('Unable to find llvm-symbolizer')
+
+
+def find_readelf(ndk_root, ndk_bin, ndk_host_tag):
+    """Finds the NDK readelf(1) binary.
+
+    Returns: An absolute path to readelf(1).
+    """
+
+    readelf = 'readelf' + EXE_SUFFIX
+    m = re.match('^[^-]+-(.*)', ndk_host_tag)
+    if m:
+        # Try as if this is not a standalone install.
+        arch = m.group(1)
+        if arch == 'arm':
+            platform_dir = arch + '-linux-androideabi'
+        else:
+            platform_dir = arch + '-linux-android'
+        path = os.path.join(ndk_root, 'toolchains', 'llvm', 'prebuilt',
+                            ndk_host_tag, platform_dir, 'bin', readelf)
+        if os.path.exists(path):
+            return path
+
+    # Might be a standalone toolchain, find the first readelf available,
+    # any should work.
+    arches = [
+        'aarch64-linux-android', 'arm-linux-androideabi',
+        'x86_64-linux-android', 'i686-linux-android'
+    ]
+    for arch in arches:
+        path = os.path.normpath(
+            os.path.join(ndk_bin, '..', arch, 'bin', readelf))
+        if os.path.exists(path):
+            return path
+    return None
+
+
+def get_build_id(readelf_path, elf_file):
+    """Get the GNU build id note from an elf file.
+
+    Returns: The build id found or None if there is no build id or the
+             readelf path does not exist.
+    """
+
+    try:
+        output = subprocess.check_output([readelf_path, '-n', elf_file])
+        m = re.search(r'Build ID:\s+([0-9a-f]+)', output.decode())
+        if not m:
+            return None
+        return m.group(1)
+    except subprocess.CalledProcessError:
+        return None
+
+
+def get_zip_info_from_offset(zip_file, offset):
+    """Get the ZipInfo object from a zip file.
+
+    Returns: A ZipInfo object found at the 'offset' into the zip file.
+             Returns None if no file can be found at the given 'offset'.
+    """
+
+    file_size = os.stat(zip_file.filename).st_size
+    if offset >= file_size:
+        return None
+
+    infos = zip_file.infolist()
+    if not infos or offset < infos[0].header_offset:
+        return None
+
+    for i in range(1, len(infos)):
+        prev_info = infos[i - 1]
+        cur_offset = infos[i].header_offset
+        if offset >= prev_info.header_offset and offset < cur_offset:
+            zip_info = prev_info
+            return zip_info
+    zip_info = infos[len(infos) - 1]
+    if offset < zip_info.header_offset:
+        return None
+    return zip_info
+
+
+class FrameInfo(object):
+    """A class to represent the data in a single backtrace frame.
+
+    Attributes:
+      num: The string representing the frame number (eg #01).
+      pc: The relative program counter for the frame.
+      elf_file: The file or map name in which the relative pc resides.
+      container_file: The name of the file that contains the elf_file.
+                      For example, an entry like GoogleCamera.apk!libsome.so
+                      would set container_file to GoogleCamera.apk and
+                      set elf_file to libsome.so. Set to None if no ! found.
+      offset: The offset into the file at which this library was mapped.
+              Set to None if no offset found.
+      build_id: The Gnu build id note parsed from the frame information.
+                Set to None if no build id found.
+      tail: The part of the line after the program counter.
+    """
+
+    # See unwindstack::FormatFrame in libunwindstack.
+    # We're deliberately very loose because NDK users are likely to be
+    # looking at crashes on ancient OS releases.
+    # TODO: support asan stacks too?
+    _line_re = re.compile(r'.* +(#[0-9]+) +pc ([0-9a-f]+) +(([^ ]+).*)')
+    _lib_re = re.compile(r'([^\!]+)\!(.+)')
+    _offset_re = re.compile(r'\(offset\s+(0x[0-9a-f]+)\)')
+    _build_id_re = re.compile(r'\(BuildId:\s+([0-9a-f]+)\)')
+
+    @classmethod
+    def from_line(cls, line):
+        m = FrameInfo._line_re.match(line)
+        if not m:
+            return None
+        return cls(*m.group(1, 2, 3, 4))
+
+    def __init__(self, num, pc, tail, elf_file):
+        self.num = num
+        self.pc = pc
+        self.tail = tail
+        self.elf_file = elf_file
+        m = FrameInfo._lib_re.match(self.elf_file)
+        if m:
+            self.container_file = m.group(1)
+            self.elf_file = m.group(2)
+            # Sometimes an entry like this will occur:
+            #   #01 pc 0000abcd  /system/lib/lib/libc.so!libc.so (offset 0x1000)
+            # In this case, no container file should be set.
+            if os.path.basename(self.container_file) == os.path.basename(
+                    self.elf_file):
+                self.elf_file = self.container_file
+                self.container_file = None
+        else:
+            self.container_file = None
+        m = FrameInfo._offset_re.search(self.tail)
+        if m:
+            self.offset = int(m.group(1), 16)
+        else:
+            self.offset = None
+        m = FrameInfo._build_id_re.search(self.tail)
+        if m:
+            self.build_id = m.group(1)
+        else:
+            self.build_id = None
+
+    def verify_elf_file(self, readelf_path, elf_file_path, display_elf_path):
+        """Verify if the elf file is valid.
+
+        Returns: True if the elf file exists and build id matches (if it exists).
+        """
+
+        if not os.path.exists(elf_file_path):
+            return False
+        if readelf_path and self.build_id:
+            build_id = get_build_id(readelf_path, elf_file_path)
+            if self.build_id != build_id:
+                print(
+                    'WARNING: Mismatched build id for %s' % (display_elf_path))
+                print('WARNING:   Expected %s' % (self.build_id))
+                print('WARNING:   Found    %s' % (build_id))
+                return False
+        return True
+
+    def get_elf_file(self, symbol_dir, readelf_path, tmp_dir):
+        """Get the path to the elf file represented by this frame.
+
+        Returns: The path to the elf file if it is valid, or None if
+                 no valid elf file can be found. If the file has to be
+                 extracted from an apk, the elf file will be placed in
+                 tmp_dir.
+        """
+
+        elf_file = os.path.basename(self.elf_file)
+        if self.container_file:
+            # This matches a file format such as Base.apk!libsomething.so
+            # so see if we can find libsomething.so in the symbol directory.
+            elf_file_path = os.path.join(symbol_dir, elf_file)
+            if self.verify_elf_file(readelf_path, elf_file_path,
+                                    elf_file_path):
+                return elf_file_path
+
+            apk_file_path = os.path.join(symbol_dir,
+                                         os.path.basename(self.container_file))
+            with zipfile.ZipFile(apk_file_path) as zip_file:
+                zip_info = get_zip_info_from_offset(zip_file, self.offset)
+                if not zip_info:
+                    return None
+                elf_file_path = zip_file.extract(zip_info,
+                                                 tmp_dir.get_directory())
+                display_elf_file = '%s!%s' % (apk_file_path, elf_file)
+                if not self.verify_elf_file(readelf_path, elf_file_path,
+                                            display_elf_file):
+                    return None
+                return elf_file_path
+        elif elf_file[-4:] == '.apk':
+            # This matches a stack line such as:
+            #   #08 pc 00cbed9c  GoogleCamera.apk (offset 0x6e32000)
+            apk_file_path = os.path.join(symbol_dir, elf_file)
+            with zipfile.ZipFile(apk_file_path) as zip_file:
+                zip_info = get_zip_info_from_offset(zip_file, self.offset)
+                if not zip_info:
+                    return None
+
+                # Rewrite the output tail so that it goes from:
+                #   GoogleCamera.apk ...
+                # To:
+                #   GoogleCamera.apk!libsomething.so ...
+                index = self.tail.find(elf_file)
+                if index != -1:
+                    index += len(elf_file)
+                    self.tail = (self.tail[0:index] + '!' + os.path.basename(
+                        zip_info.filename) + self.tail[index:])
+                elf_file = os.path.basename(zip_info.filename)
+                elf_file_path = os.path.join(symbol_dir, elf_file)
+                if self.verify_elf_file(readelf_path, elf_file_path,
+                                        elf_file_path):
+                    return elf_file_path
+
+                elf_file_path = zip_file.extract(zip_info,
+                                                 tmp_dir.get_directory())
+                display_elf_path = '%s!%s' % (apk_file_path, elf_file)
+                if not self.verify_elf_file(readelf_path, elf_file_path,
+                                            display_elf_path):
+                    return None
+                return elf_file_path
+        elf_file_path = os.path.join(symbol_dir, elf_file)
+        if self.verify_elf_file(readelf_path, elf_file_path, elf_file_path):
+            return elf_file_path
+        return None
+
+
+def main(argv):
+    """Program entry point."""
+    parser = argparse.ArgumentParser(
+        description='Symbolizes Android crashes.',
+        epilog='See <https://developer.android.com/ndk/guides/ndk-stack>.')
+    parser.add_argument(
+        '-sym',
+        '--sym',
+        dest='symbol_dir',
+        required=True,  # TODO: default to '.'?
+        help='directory containing unstripped .so files')
+    parser.add_argument(
+        '-i',
+        '-dump',
+        '--dump',
+        dest='input',
+        default=sys.stdin,
+        type=argparse.FileType('r'),
+        help='input filename')
+    args = parser.parse_args(argv)
+
+    if not os.path.exists(args.symbol_dir):
+        sys.exit('{} does not exist!\n'.format(args.symbol_dir))
+
+    ndk_paths = get_ndk_paths()
+    symbolize_cmd = [
+        find_llvm_symbolizer(*ndk_paths), '--demangle', '--functions=linkage',
+        '--inlining=true', '--use-symbol-table=true'
+    ]
+    readelf_path = find_readelf(*ndk_paths)
+
+    symbolize_proc = None
+    try:
+        tmp_dir = TmpDir()
+        symbolize_proc = subprocess.Popen(
+            symbolize_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        banner = '*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***'
+        in_crash = False
+        saw_frame = False
+        for line in args.input:
+            line = line.rstrip()
+
+            if not in_crash:
+                if banner in line:
+                    in_crash = True
+                    saw_frame = False
+                    print('********** Crash dump: **********')
+                continue
+
+            for tag in ['Build fingerprint:', 'Abort message:']:
+                if tag in line:
+                    print(line[line.find(tag):])
+                    continue
+
+            frame_info = FrameInfo.from_line(line)
+            if not frame_info:
+                if saw_frame:
+                    in_crash = False
+                    print('Crash dump is completed\n')
+                continue
+            saw_frame = True
+
+            try:
+                elf_file = frame_info.get_elf_file(args.symbol_dir,
+                                                   readelf_path, tmp_dir)
+            except IOError:
+                elf_file = None
+
+            # Print a slightly different version of the stack trace line.
+            # The original format:
+            #      #00 pc 0007b350  /lib/bionic/libc.so (__strchr_chk+4)
+            # becomes:
+            #      #00 0x0007b350 /lib/bionic/libc.so (__strchr_chk+4)
+            out_line = '%s 0x%s %s' % (frame_info.num, frame_info.pc,
+                                       frame_info.tail)
+            print(out_line)
+            indent = (out_line.find('(') + 1) * ' '
+            if not elf_file:
+                continue
+            value = '"%s" 0x%s\n' % (elf_file, frame_info.pc)
+            symbolize_proc.stdin.write(value.encode())
+            symbolize_proc.stdin.flush()
+            while True:
+                symbolizer_output = symbolize_proc.stdout.readline().rstrip()
+                if not symbolizer_output:
+                    break
+                # TODO: rewrite file names based on a source path?
+                print('%s%s' % (indent, symbolizer_output.decode()))
+    finally:
+        args.input.close()
+        tmp_dir.delete()
+        if symbolize_proc:
+            symbolize_proc.stdin.close()
+            symbolize_proc.stdout.close()
+            symbolize_proc.kill()
+            symbolize_proc.wait()
+
+
+if __name__ == '__main__':
+    main(sys.argv[1:])
diff --git a/ndk-which b/ndk-which
index 44f888c..2eaf93c 100755
--- a/ndk-which
+++ b/ndk-which
@@ -1,4 +1,4 @@
-#!/usr/bin/env bash
+#!/bin/bash
 #
 # Copyright (C) 2012 The Android Open Source Project
 #
@@ -33,25 +33,6 @@
   exit 1
 }
 
-error() {
-  echo "The tool: $1 doesn't exist"
-  echo "Possible choices are: "
-  count=0
-  for file in $2*
-  do
-    if [[ $file == *$1 ]]
-    then
-        echo $file
-        ((count = count + 1))
-    fi
-  done
-  if [ $count -eq 0 ]
-  then
-      echo " None "
-  fi
-  exit 1
-}
-
 ABI=armeabi-v7a
 
 while (( "$#" )); do
@@ -91,7 +72,7 @@
   fi
   NDK_PROJECT_PATH=$TMPDIR $GNUMAKE --no-print-dir -f $MYNDKDIR/build/core/build-local.mk DUMP_$1 APP_ABI=$2
 }
-LLVM_TOOLCHAIN_PREFIX=`get_build_var_for_abi LLVM_TOOLCHAIN_PREFIX $ABI`
+
 TOOLCHAIN_PREFIX=`get_build_var_for_abi TOOLCHAIN_PREFIX $ABI`
 rm -Rf $TMPDIR
 
@@ -99,12 +80,4 @@
 FQFN=${TOOLCHAIN_PREFIX}$TOOL
 
 # use the host system's 'which' to decide/report if the file exists or not, and is executable
-if [ ! -f $FQFN ]
-then
-    FQFN=${LLVM_TOOLCHAIN_PREFIX}llvm-$TOOL
-    if [ ! -f $FQFN ]
-    then
-        error $TOOL $LLVM_TOOLCHAIN_PREFIX
-    fi
-fi
 which "$FQFN"
diff --git a/ndk/abis.py b/ndk/abis.py
index 18dfad7..57914ce 100644
--- a/ndk/abis.py
+++ b/ndk/abis.py
@@ -14,26 +14,23 @@
 # limitations under the License.
 #
 """Constants and helper functions for NDK ABIs."""
-from collections.abc import Iterator
-from typing import NewType, Optional
+from typing import List, NewType
 
-from .platforms import FIRST_LP64_API_LEVEL, FIRST_RISCV64_API_LEVEL, MIN_API_LEVEL
 
-Arch = NewType("Arch", str)
-Abi = NewType("Abi", str)
-Toolchain = NewType("Toolchain", str)
+Arch = NewType('Arch', str)
+Abi = NewType('Abi', str)
+Toolchain = NewType('Toolchain', str)
 
 
 LP32_ABIS = (
-    Abi("armeabi-v7a"),
-    Abi("x86"),
+    'armeabi-v7a',
+    'x86',
 )
 
 
 LP64_ABIS = (
-    Abi("arm64-v8a"),
-    Abi("riscv64"),
-    Abi("x86_64"),
+    'arm64-v8a',
+    'x86_64',
 )
 
 
@@ -41,83 +38,101 @@
 
 
 ALL_ARCHITECTURES = (
-    Arch("arm"),
-    Arch("arm64"),
-    Arch("riscv64"),
-    Arch("x86"),
-    Arch("x86_64"),
+    Arch('arm'),
+    Arch('arm64'),
+    Arch('x86'),
+    Arch('x86_64'),
+)
+
+
+ALL_TOOLCHAINS = (
+    Toolchain('arm-linux-androideabi'),
+    Toolchain('aarch64-linux-android'),
+    Toolchain('x86'),
+    Toolchain('x86_64'),
 )
 
 
 ALL_TRIPLES = (
-    "arm-linux-androideabi",
-    "aarch64-linux-android",
-    "riscv64-linux-android",
-    "i686-linux-android",
-    "x86_64-linux-android",
+    'arm-linux-androideabi',
+    'aarch64-linux-android',
+    'i686-linux-android',
+    'x86_64-linux-android',
 )
 
 
+def arch_to_toolchain(arch: Arch) -> Toolchain:
+    """Returns the NDK toolchain name for the given architecture."""
+    return dict(zip(ALL_ARCHITECTURES, ALL_TOOLCHAINS))[arch]
+
+
 def arch_to_triple(arch: Arch) -> str:
     """Returns the triple for the given architecture."""
     return dict(zip(ALL_ARCHITECTURES, ALL_TRIPLES))[arch]
 
 
+def toolchain_to_arch(toolchain: Toolchain) -> Arch:
+    """Returns the architecture for the given toolchain."""
+    return dict(zip(ALL_TOOLCHAINS, ALL_ARCHITECTURES))[toolchain]
+
+
+def arch_to_abis(arch: Arch) -> List[Abi]:
+    """Returns the ABIs for the given architecture."""
+    return {
+        Arch('arm'): [Abi('armeabi-v7a')],
+        Arch('arm64'): [Abi('arm64-v8a')],
+        Arch('x86'): [Abi('x86')],
+        Arch('x86_64'): [Abi('x86_64')],
+    }[arch]
+
+
 def abi_to_arch(abi: Abi) -> Arch:
     """Returns the architecture for the given ABI."""
     return {
-        Abi("armeabi-v7a"): Arch("arm"),
-        Abi("arm64-v8a"): Arch("arm64"),
-        Abi("riscv64"): Arch("riscv64"),
-        Abi("x86"): Arch("x86"),
-        Abi("x86_64"): Arch("x86_64"),
+        Abi('armeabi-v7a'): Arch('arm'),
+        Abi('arm64-v8a'): Arch('arm64'),
+        Abi('x86'): Arch('x86'),
+        Abi('x86_64'): Arch('x86_64'),
     }[abi]
 
 
-def abi_to_triple(abi: Abi) -> str:
-    """Returns the triple for the given ABI."""
-    return arch_to_triple(abi_to_arch(abi))
+def clang_target(arch: Arch, api: int = None) -> str:
+    """Returns the Clang target to be used for the given arch/API combo.
 
-
-def clang_target(abi: Abi, api: Optional[int] = None) -> str:
-    """Returns the Clang target to be used for the given ABI/API combo.
-
-    api: API level to compile for. Defaults to the lowest supported API
-        level for the architecture if None.
+    Args:
+        arch: Architecture to compile for. 'arm' will target ARMv7.
+        api: API level to compile for. Defaults to the lowest supported API
+            level for the architecture if None.
     """
     if api is None:
+        # Currently there is only one ABI per arch.
+        abis = arch_to_abis(arch)
+        assert len(abis) == 1
+        abi = abis[0]
         api = min_api_for_abi(abi)
-    triple = abi_to_triple(abi)
-    if abi == Abi("armeabi-v7a"):
-        triple = "armv7a-linux-androideabi"
-    return f"{triple}{api}"
+    triple = arch_to_triple(arch)
+    if arch == 'arm':
+        triple = 'armv7a-linux-androideabi'
+    return f'{triple}{api}'
 
 
 def min_api_for_abi(abi: Abi) -> int:
     """Returns the minimum supported build API for the given ABI.
 
-    >>> min_api_for_abi(Abi('arm64-v8a'))
+    >>> min_api_for_abi('arm64-v8a')
     21
 
-    >>> min_api_for_abi(Abi('armeabi-v7a'))
-    21
+    >>> min_api_for_abi('armeabi-v7a')
+    16
 
-    >>> min_api_for_abi(Abi('foobar'))
+    >>> min_api_for_abi('foobar')
     Traceback (most recent call last):
         ...
     ValueError: Invalid ABI: foobar
     """
-    if abi == Abi("riscv64"):
-        return FIRST_RISCV64_API_LEVEL
     if abi in LP64_ABIS:
-        return FIRST_LP64_API_LEVEL
-    if abi in LP32_ABIS:
-        return MIN_API_LEVEL
-    raise ValueError("Invalid ABI: {}".format(abi))
-
-
-def iter_abis_for_api(api: int) -> Iterator[Abi]:
-    """Returns an Iterator over ABIs available at the given API level."""
-    for abi in ALL_ABIS:
-        if min_api_for_abi(abi) <= api:
-            yield abi
+        return 21
+    elif abi in LP32_ABIS:
+        return 16
+    else:
+        raise ValueError('Invalid ABI: {}'.format(abi))
diff --git a/ndk/ansi.py b/ndk/ansi.py
index b32d1e6..260d605 100644
--- a/ndk/ansi.py
+++ b/ndk/ansi.py
@@ -14,16 +14,17 @@
 # limitations under the License.
 #
 """ANSI terminal control."""
-from __future__ import absolute_import, print_function
+from __future__ import absolute_import
+from __future__ import print_function
 
 import contextlib
 import os
+import subprocess
 import sys
-from typing import Any, Iterator, TextIO
+from typing import Any, Iterator, Optional, NamedTuple, TextIO
 
 try:
     import termios
-
     HAVE_TERMIOS = True
 except ImportError:
     HAVE_TERMIOS = False
@@ -33,47 +34,48 @@
     """Returns the command to move the cursor up a given number of lines."""
     # \033[0A still goes up one line. Emit nothing.
     if num_lines == 0:
-        return ""
-    return f"\033[{num_lines}A"
+        return ''
+    return f'\033[{num_lines}A'
 
 
 def cursor_down(num_lines: int) -> str:
     """Returns the command to move the cursor down a given number of lines."""
     # \033[0B still goes down one line. Emit nothing.
     if num_lines == 0:
-        return ""
-    return f"\033[{num_lines}B"
+        return ''
+    return f'\033[{num_lines}B'
 
 
 def goto_first_column() -> str:
     """Returns the command to move the cursor to the first column."""
-    return "\033[1G"
+    return '\033[1G'
 
 
 def clear_line() -> str:
     """Returns the command to clear the current line."""
-    return "\033[K"
+    return '\033[K'
 
 
 def font_bold() -> str:
     """Returns the command to set the font to bold."""
-    return "\033[1m"
+    return '\033[1m'
 
 
 def font_faint() -> str:
     """Returns the command to set the font to faint."""
-    return "\033[2m"
+    return '\033[2m'
 
 
 def font_reset() -> str:
     """Returns the command to reset the font style."""
-    return "\033[0m"
+    return '\033[0m'
 
 
 def is_self_in_tty_foreground_group(fd: TextIO) -> bool:
     """Is this process in the foreground process group of a tty identified
     by fd?"""
-    return HAVE_TERMIOS and fd.isatty() and os.getpgrp() == os.tcgetpgrp(fd.fileno())
+    return HAVE_TERMIOS and fd.isatty() and \
+        os.getpgrp() == os.tcgetpgrp(fd.fileno())
 
 
 @contextlib.contextmanager
@@ -86,7 +88,7 @@
         termattr = termios.tcgetattr(fd)
         # This is the example from the termios docs, but it doesn't pass type
         # checking...
-        termattr[3] &= ~termios.ECHO
+        termattr[3] &= ~termios.ECHO  # type: ignore
         termios.tcsetattr(fd, termios.TCSANOW, termattr)
         try:
             yield
@@ -96,6 +98,27 @@
         yield
 
 
+class ConsoleRect(NamedTuple):
+    """A pair of width and height for a console."""
+
+    #: Console width.
+    width: int
+
+    #: Console height.
+    height: int
+
+
+def get_console_size_linux() -> ConsoleRect:
+    """Returns a pair of height, width for the TTY."""
+    height_str, width_str = subprocess.check_output(['stty', 'size']).split()
+    return ConsoleRect(width=int(width_str), height=int(height_str))
+
+
+def get_console_size_windows() -> ConsoleRect:
+    """Returns a pair of height, width for the TTY."""
+    raise NotImplementedError
+
+
 class Console:
     """Manages the state of a console for a stream."""
 
@@ -136,29 +159,30 @@
 
 def get_console(stream: TextIO = sys.stdout) -> Console:
     """Returns a Console bound to the given stream."""
-    if stream.isatty() and os.name != "nt":
+    if stream.isatty() and os.name != 'nt':
         return AnsiConsole(stream)
-    return NonAnsiConsole(stream)
+    else:
+        return DumbConsole(stream)
 
 
 class AnsiConsole(Console):
     """A console that supports ANSI control."""
 
-    GOTO_HOME = "\r"
-    CURSOR_UP = "\033[1A"
-    CLEAR_LINE = "\033[K"
-    HIDE_CURSOR = "\033[?25l"
-    SHOW_CURSOR = "\033[?25h"
+    GOTO_HOME = '\r'
+    CURSOR_UP = '\033[1A'
+    CLEAR_LINE = '\033[K'
+    HIDE_CURSOR = '\033[?25l'
+    SHOW_CURSOR = '\033[?25h'
 
-    _size: os.terminal_size
+    _size: Optional[ConsoleRect]
 
     def __init__(self, stream: TextIO) -> None:
         super().__init__(stream, smart_console=True)
-        self._size = os.get_terminal_size()
+        self._size = None
 
     def _do(self, cmd: str) -> None:
         """Performs the given command."""
-        print(cmd, end="", file=self.stream)
+        print(cmd, end='', file=self.stream)
         self.stream.flush()
 
     def clear_lines(self, num_lines: int) -> None:
@@ -168,7 +192,7 @@
             if idx != 0:
                 cmds.append(self.CURSOR_UP)
             cmds.append(self.CLEAR_LINE)
-        self._do("".join(cmds))
+        self._do(''.join(cmds))
 
     def hide_cursor(self) -> None:
         self._do(self.HIDE_CURSOR)
@@ -176,18 +200,31 @@
     def show_cursor(self) -> None:
         self._do(self.SHOW_CURSOR)
 
+    def init_window_size(self) -> None:
+        """Initializes the console size."""
+        if os.name == 'nt':
+            self._size = get_console_size_windows()
+        else:
+            self._size = get_console_size_linux()
+
     @property
     def height(self) -> int:
         """The height of the console in characters."""
-        return self._size.lines
+        if self._size is None:
+            self.init_window_size()
+        assert self._size is not None
+        return self._size.height
 
     @property
     def width(self) -> int:
         """The width of the console in characters."""
-        return self._size.columns
+        if self._size is None:
+            self.init_window_size()
+        assert self._size is not None
+        return self._size.width
 
 
-class NonAnsiConsole(Console):
+class DumbConsole(Console):
     """A console that does not support any ANSI features."""
 
     def __init__(self, stream: TextIO) -> None:
diff --git a/ndk/archive.py b/ndk/archive.py
deleted file mode 100644
index 60c0ff1..0000000
--- a/ndk/archive.py
+++ /dev/null
@@ -1,217 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Helper functions for reading and writing .zip and .tar.bz2 archives."""
-import os
-import shutil
-import subprocess
-from pathlib import Path
-from typing import List
-
-import ndk.paths
-from ndk.hosts import Host
-
-
-def make_bztar(base_name: Path, root_dir: Path, base_dir: Path) -> None:
-    """Create a compressed tarball.
-
-    Arguments have the same name and meaning as shutil.make_archive.
-
-    Args:
-        base_name: Base name of archive to create. ".tar.bz2" will be appended.
-        root_dir: Directory that's the root of the archive.
-        base_dir: Directory relative to root_dir to archive.
-    """
-    if not root_dir.is_dir():
-        raise RuntimeError(f"Not a directory: {root_dir}")
-    if not (root_dir / base_dir).is_dir():
-        raise RuntimeError(f"Not a directory: {root_dir}/{base_dir}")
-
-    if os.name == "nt":
-        shutil.make_archive(
-            str(base_name),
-            "bztar",
-            str(root_dir),
-            str(base_dir),
-        )
-    else:
-        subprocess.check_call(
-            [
-                "tar",
-                (
-                    "-j"
-                    if shutil.which("pbzip2") is None
-                    else "--use-compress-prog=pbzip2"
-                ),
-                "-cf",
-                str(base_name.with_suffix(".tar.bz2")),
-                "-C",
-                str(root_dir),
-                str(base_dir),
-            ]
-        )
-
-
-def make_brtar(
-    base_name: Path, root_dir: Path, base_dir: Path, preserve_symlinks: bool
-) -> Path:
-    """Create a Brotli-compressed tarball.
-
-    Arguments have the same name and meaning as shutil.make_archive.
-
-    Args:
-        base_name: Base name of archive to create. ".tar.br" will be appended.
-        root_dir: Directory that's the root of the archive.
-        base_dir: Directory relative to root_dir to archive.
-    """
-    if not root_dir.is_dir():
-        raise RuntimeError(f"Not a directory: {root_dir}")
-    if not (root_dir / base_dir).is_dir():
-        raise RuntimeError(f"Not a directory: {root_dir}/{base_dir}")
-
-    br_file = base_name.with_suffix(".tar.br")
-
-    if os.name == "nt":
-        raise NotImplementedError
-    cmd = ["tar"]
-    if not preserve_symlinks:
-        cmd.append("--dereference")
-    cmd.extend(
-        [
-            "--use-compress-program",
-            str(
-                ndk.paths.android_path(
-                    "prebuilts/build-tools/{host}-x86/bin/brotli".format(
-                        host=Host.current().value
-                    )
-                )
-            )
-            # Choice of 7 as quality parameter based on the following data:
-            #
-            # q | size (MB) | compression time relative to -q 0
-            # --+-----------+----------------------------------
-            # 0 | 622       |  0:00
-            # 2 | 514       |  0:10
-            # 5 | 447       |  1:14
-            # 6 | 435       |  1:48
-            # 7 | 401       |  3:24
-            # 8 | 393       |  5:35
-            # 9 | 388       | 10:37
-            + " -q 7",
-            "-cf",
-            str(br_file),
-            "-C",
-            str(root_dir),
-            str(base_dir),
-        ]
-    )
-    subprocess.check_call(cmd)
-    return br_file
-
-
-# For (un)zipping archives on Unix-like systems, the "zip" and "unzip" commands
-# are pretty universally available.
-#
-# For Windows, the situation is more complicated. After trying and rejecting
-# several options, the somewhat surprising best choice is the "tar"
-# command, which is available in Windows since 2018:
-# https://docs.microsoft.com/en-us/virtualization/community/team-blog/2017/20171219-tar-and-curl-come-to-windows
-# Note that this is bsdtar, which has slightly different command-line flags
-# than GNU tar.
-#
-# For the record, here are other options, and why they didn't work:
-#
-# - Python's built-in shutil.unpack_archive uses the "zipfile" module, which
-#   does not restore permissions, including the executable bit, when
-#   unzipping. https://bugs.python.org/issue15795
-#
-# - 7-zip is popular and works on a wide range of Windows versions,
-#   but it is not guaranteed to be available, and is, in fact, not
-#   available on our Windows build machines.
-#
-# - Expand-Archive in PowerShell results in modification times in the future
-#   for NDK .zip files, possibly due to not handling time zones correctly.
-#
-# For more information, see https://superuser.com/questions/1314420/how-to-unzip-a-file-using-the-cmd
-#
-# See also the following changes:
-# - 7-zip: https://android-review.googlesource.com/c/platform/ndk/+/1963599
-# - PowerShell: https://android-review.googlesource.com/c/platform/ndk/+/1965510
-# - Tar: https://android-review.googlesource.com/c/platform/ndk/+/1967235
-
-
-def make_zip(
-    base_name: Path, root_dir: Path, paths: List[str], preserve_symlinks: bool
-) -> Path:
-    """Creates a zip package for distribution.
-
-    Args:
-        base_name: Path (without extension) to the output archive.
-        root_dir: Path to the directory from which to perform the packaging
-                  (identical to tar's -C).
-        paths: Paths to files and directories to package, relative to root_dir.
-        preserve_symlinks: Whether to preserve or flatten symlinks. Should be
-            false when creating packages for Windows, but otherwise true.
-    """
-    if not root_dir.is_dir():
-        raise RuntimeError(f"Not a directory: {root_dir}")
-
-    cwd = os.getcwd()
-    zip_file = base_name.with_suffix(".zip")
-    if zip_file.exists():
-        zip_file.unlink()
-
-    # See comment above regarding .zip files on Windows.
-    if os.name == "nt":
-        # Explicit path, to avoid conflict with Cygwin.
-        args = ["c:/windows/system32/tar.exe", "-a"]
-        if not preserve_symlinks:
-            args.append("-L")
-        args.extend(["-cf", str(zip_file)])
-    else:
-        args = ["zip", "-9qr", str(zip_file)]
-        if preserve_symlinks:
-            args.append("--symlinks")
-    args.extend(paths)
-    os.chdir(root_dir)
-    try:
-        subprocess.check_call(args)
-        return zip_file
-    finally:
-        os.chdir(cwd)
-
-
-def unzip(zip_file: Path, dest_dir: Path) -> None:
-    """Unzip zip_file into dest_dir."""
-    if not zip_file.is_file() or zip_file.suffix != ".zip":
-        raise RuntimeError(f"Not a .zip file: {zip_file}")
-    if not dest_dir.is_dir():
-        raise RuntimeError(f"Not a directory: {dest_dir}")
-
-    # See comment above regarding .zip files on Windows.
-    if os.name == "nt":
-        subprocess.check_call(
-            [
-                # Explicit path, to avoid conflict with Cygwin.
-                "c:/windows/system32/tar.exe",
-                "xf",
-                str(zip_file),
-                "-C",
-                str(dest_dir),
-            ]
-        )
-    else:
-        # Unzip seems to be pretty universally available on posix systems.
-        subprocess.check_call(["unzip", "-qq", str(zip_file), "-d", str(dest_dir)])
diff --git a/ndk/autoconf.py b/ndk/autoconf.py
index ad2fd49..b17e023 100644
--- a/ndk/autoconf.py
+++ b/ndk/autoconf.py
@@ -16,43 +16,42 @@
 """APIs for dealing with autoconf scripts."""
 import multiprocessing
 import os
+from pathlib import Path
+import pipes
 import pprint
-import shlex
 import shutil
 import subprocess
-from pathlib import Path
 from typing import ContextManager, Dict, List, Optional
 
 import ndk.ext.os
+from ndk.hosts import Host, get_default_host
 import ndk.paths
 import ndk.toolchains
-from ndk.hosts import Host, get_default_host
+
 
 HOST_TRIPLE_MAP = {
-    Host.Darwin: "x86_64-apple-darwin",
-    Host.Linux: "x86_64-linux-gnu",
-    Host.Windows64: "x86_64-w64-mingw32",
+    Host.Darwin: 'x86_64-apple-darwin',
+    Host.Linux: 'x86_64-linux-gnu',
+    Host.Windows64: 'x86_64-w64-mingw32',
 }
 
 
 class AutoconfBuilder:
     """Builder for an autoconf project."""
 
-    jobs_arg = f"-j{multiprocessing.cpu_count()}"
+    jobs_arg = f'-j{multiprocessing.cpu_count()}'
 
     toolchain: ndk.toolchains.Toolchain
 
-    def __init__(
-        self,
-        configure_script: Path,
-        build_dir: Path,
-        host: Host,
-        add_toolchain_to_path: bool = False,
-        no_build_or_host: bool = False,
-        no_strip: bool = False,
-        additional_flags: Optional[list[str]] = None,
-        additional_env: Optional[Dict[str, str]] = None,
-    ) -> None:
+    def __init__(self,
+                 configure_script: Path,
+                 build_dir: Path,
+                 host: Host,
+                 add_toolchain_to_path: bool = False,
+                 use_clang: bool = False,
+                 no_build_or_host: bool = False,
+                 no_strip: bool = False,
+                 additional_flags: List[str] = None) -> None:
         """Initializes an autoconf builder.
 
         Args:
@@ -64,25 +63,26 @@
             add_toolchain_to_path: Adds the toolchain directory to the PATH
                 when invoking configure and make. Needed for some projects that
                 don't allow all tools to be passed via the environment.
+            use_clang: Set to True to use Clang to build this project.
             no_build_or_host: Don't pass --build or --host to configure.
             no_strip: Don't pass -s to compiler.
-            additional_flags: Additional flags to pass to the compiler.
-            additional_env: Additional environment to set, used during
-                configure, build, and install.
         """
         self.configure_script = configure_script
         self.build_directory = build_dir
         self.host = host
         self.add_toolchain_to_path = add_toolchain_to_path
+        self.use_clang = use_clang
         self.no_build_or_host = no_build_or_host
         self.no_strip = no_strip
         self.additional_flags = additional_flags
-        self.additional_env = additional_env
 
-        self.working_directory = self.build_directory / "build"
-        self.install_directory = self.build_directory / "install"
+        self.working_directory = self.build_directory / 'build'
+        self.install_directory = self.build_directory / 'install'
 
-        self.toolchain = ndk.toolchains.ClangToolchain(self.host)
+        if use_clang:
+            self.toolchain = ndk.toolchains.ClangToolchain(self.host)
+        else:
+            self.toolchain = ndk.toolchains.GccToolchain(self.host)
 
     @property
     def flags(self) -> List[str]:
@@ -90,48 +90,44 @@
         # TODO: Are these the flags we want? These are what we've used
         # historically.
         flags = [
-            "-Os",
-            "-fomit-frame-pointer",
+            '-Os',
+            '-fomit-frame-pointer',
+
             # AC_CHECK_HEADERS fails if the compiler emits any warnings. We're
             # guaranteed to hit -Wunused-command-line-argument since autoconf
             # does a bad job with cflags/ldflags, so we need to pass all of the
             # flags all the time, but use -w since we won't be fixing any GDB
             # warnings anyway and failures caused by this don't actually appear
             # until much later in the build.
-            "-w",
+            '-w',
         ]
-        if not self.host == Host.Darwin:
-            flags.append("-fuse-ld=lld")
         if not self.no_strip:
-            flags.append("-s")
+            flags.append('-s')
         if self.additional_flags:
             flags.extend(self.additional_flags)
         return flags
 
-    def cd(self) -> ContextManager[None]:
+    def cd(self) -> ContextManager:
         """Context manager that moves into the working directory."""
-        return ndk.ext.os.cd(self.working_directory)
+        return ndk.ext.os.cd(str(self.working_directory))
 
-    def _run(self, cmd: List[str], extra_env: Optional[Dict[str, str]] = None) -> None:
+    def _run(self, cmd: List[str],
+             extra_env: Optional[Dict[str, str]] = None) -> None:
         """Runs and logs execution of a subprocess."""
         env = dict(extra_env) if extra_env is not None else {}
         if self.add_toolchain_to_path:
             paths = [str(p) for p in self.toolchain.bin_paths]
-            paths.append(os.environ["PATH"])
-            env["PATH"] = os.pathsep.join(paths)
+            paths.append(os.environ['PATH'])
+            env['PATH'] = os.pathsep.join(paths)
 
-        pp_cmd = " ".join([shlex.quote(arg) for arg in cmd])
+        pp_cmd = ' '.join([pipes.quote(arg) for arg in cmd])
         subproc_env = dict(os.environ)
         if env:
             subproc_env.update(env)
-        if self.additional_env:
-            subproc_env.update(self.additional_env)
-
-        if subproc_env != dict(os.environ):
             pp_env = pprint.pformat(env, indent=4)
-            print("Running: {} with env:\n{}".format(pp_cmd, pp_env))
+            print('Running: {} with env:\n{}'.format(pp_cmd, pp_env))
         else:
-            print("Running: {}".format(pp_cmd))
+            print('Running: {}'.format(pp_cmd))
 
         subprocess.run(cmd, env=subproc_env, check=True)
 
@@ -164,49 +160,45 @@
                 build_triple = HOST_TRIPLE_MAP[get_default_host()]
                 host_triple = HOST_TRIPLE_MAP[self.host]
                 build_host_args = [
-                    f"--build={build_triple}",
-                    f"--host={host_triple}",
+                    f'--build={build_triple}',
+                    f'--host={host_triple}',
                 ]
 
-            configure_args = (
-                [
-                    str(self.configure_script),
-                    f"--prefix={self.install_directory}",
-                ]
-                + build_host_args
-                + args
-            )
+            configure_args = [
+                str(self.configure_script),
+                f'--prefix={self.install_directory}',
+            ] + build_host_args + args
 
-            flags_str = " ".join(self.toolchain.flags + self.flags)
-            cc = f"{self.toolchain.cc} {flags_str}"
-            cxx = f"{self.toolchain.cxx} -stdlib=libc++ {flags_str}"
+            flags_str = ' '.join(self.toolchain.flags + self.flags)
+            cc = f'{self.toolchain.cc} {flags_str}'
+            cxx = f'{self.toolchain.cxx} -stdlib=libc++ {flags_str}'
 
             configure_env: Dict[str, str] = {
-                "CC": cc,
-                "CXX": cxx,
-                "LD": str(self.toolchain.ld),
-                "AR": str(self.toolchain.ar),
-                "AS": str(self.toolchain.asm),
-                "RANLIB": str(self.toolchain.ranlib),
-                "NM": str(self.toolchain.nm),
-                "STRIP": str(self.toolchain.strip),
-                "STRINGS": str(self.toolchain.strings),
+                'CC': cc,
+                'CXX': cxx,
+                'LD': str(self.toolchain.ld),
+                'AR': str(self.toolchain.ar),
+                'AS': str(self.toolchain.asm),
+                'RANLIB': str(self.toolchain.ranlib),
+                'NM': str(self.toolchain.nm),
+                'STRIP': str(self.toolchain.strip),
+                'STRINGS': str(self.toolchain.strings),
             }
             if self.host.is_windows:
-                configure_env["WINDRES"] = str(self.toolchain.rescomp)
-                configure_env["RESCOMP"] = str(self.toolchain.rescomp)
+                configure_env['WINDRES'] = str(self.toolchain.rescomp)
+                configure_env['RESCOMP'] = str(self.toolchain.rescomp)
 
             self._run(configure_args, configure_env)
 
     def make(self) -> None:
         """Builds the project."""
         with self.cd():
-            self._run(["make", self.jobs_arg])
+            self._run(['make', self.jobs_arg])
 
     def install(self) -> None:
         """Installs the project."""
         with self.cd():
-            self._run(["make", self.jobs_arg, "install"])
+            self._run(['make', self.jobs_arg, 'install'])
 
     def build(self, configure_args: Optional[List[str]] = None) -> None:
         """Configures and builds an autoconf project.
diff --git a/ndk/builds.py b/ndk/builds.py
index 7667cfb..a32b120 100644
--- a/ndk/builds.py
+++ b/ndk/builds.py
@@ -17,21 +17,25 @@
 
 Note: this isn't the ndk-build API, but the API for building the NDK itself.
 """
-from __future__ import annotations
+from __future__ import absolute_import
 
+# pylint: disable=import-error,no-name-in-module
+# https://github.com/PyCQA/pylint/issues/73
+from distutils.dir_util import copy_tree
+from enum import auto, Enum, unique
+import ntpath
+import os
+from pathlib import Path
 import shutil
 import stat
 import subprocess
-import textwrap
-import zipapp
-from enum import Enum, auto, unique
-from pathlib import Path, PureWindowsPath
-from typing import Any, Dict, Iterator, List, Optional, Set
+from typing import Iterable, List, Optional, Set
 
-import ndk.paths
+import ndk.abis
 from ndk.autoconf import AutoconfBuilder
-from ndk.cmake import CMakeBuilder
-from ndk.hosts import Host
+import ndk.ext.shutil
+import ndk.packaging
+import ndk.paths
 
 
 class ModuleValidateError(RuntimeError):
@@ -45,7 +49,6 @@
     The NDK ships two NOTICE files: one for the toolchain, and one for
     everything else.
     """
-
     BASE = auto()
     TOOLCHAIN = auto()
 
@@ -53,39 +56,24 @@
 class BuildContext:
     """Class containing build context information."""
 
-    def __init__(
-        self,
-        out_dir: Path,
-        dist_dir: Path,
-        modules: List[Module],
-        host: Host,
-        build_number: int,
-    ) -> None:
+    def __init__(self, out_dir: str, dist_dir: str, modules: List['Module'],
+                 host: ndk.hosts.Host, arches: List[ndk.abis.Arch],
+                 build_number: str) -> None:
         self.out_dir = out_dir
         self.dist_dir = dist_dir
         self.modules = {m.name: m for m in modules}
         self.host = host
+        self.arches = arches
         self.build_number = build_number
 
 
 class Module:
     """Base module type for the build system."""
 
-    # pylint wrongly emits no-member if these don't have default values
-    # https://github.com/PyCQA/pylint/issues/3167
-    #
-    # We override __getattribute__ to catch any uses of this value
-    # uninitialized and raise an error.
-    name: str = ""
-    install_path: Path = Path()
+    name: str
+    path: str
     deps: Set[str] = set()
 
-    def __getattribute__(self, name: str) -> Any:
-        attr = super().__getattribute__(name)
-        if name in ("name", "install_path") and attr == "":
-            raise RuntimeError(f"Uninitialized use of {name}")
-        return attr
-
     # Used to exclude a module from the build. If explicitly named it will
     # still be built, but it is not included by default.
     enabled = True
@@ -94,7 +82,7 @@
     # interface is a single path, not a list. For the rare modules that have
     # multiple notice files (such as yasm), the notices property should be
     # overrided. By default this property will return `[self.notice]`.
-    notice: Optional[Path] = None
+    notice: Optional[str] = None
 
     # Not all components need a notice (stub scripts, basic things like the
     # readme and changelog, etc), but this is opt-out.
@@ -106,11 +94,12 @@
     # text being included in NOTICE.toolchain.
     notice_group = NoticeGroup.BASE
 
-    # Set to True if this module is merely a build convenience and not intented
-    # to be shipped. For example, Platforms has its own build steps but is
-    # shipped within the Toolchain module. If this value is set, the module's
-    # install directory will not be within the NDK.
-    intermediate_module = False
+    # If split_build_by_arch is set, one workqueue task will be created for
+    # each architecture. The Module object will be cloned for each arch and
+    # each will have build_arch set to the architecture that should be built by
+    # that module. If build_arch is None, the module has not yet been split.
+    split_build_by_arch = False
+    build_arch: Optional[ndk.abis.Arch] = None
 
     def __init__(self) -> None:
         self.context: Optional[BuildContext] = None
@@ -119,17 +108,15 @@
         self.validate()
 
     @property
-    def notices(self) -> Iterator[Path]:
-        """Iterates over the notice files for this module."""
+    def notices(self) -> List[str]:
+        """Returns the list of notice files for this module."""
         if self.no_notice:
-            return
+            return []
         if self.notice is None:
-            return
-        yield self.notice
+            return []
+        return [self.notice]
 
-    # This can't actually be static because subclasses might use self, but for some
-    # reason pylint doesn't know that in this case.
-    def default_notice_path(self) -> Path | None:
+    def default_notice_path(self) -> Optional[str]:
         """Returns the path to the default notice for this module, if any."""
         return None
 
@@ -141,7 +128,7 @@
         Args:
             msg: Detailed error message.
         """
-        return ModuleValidateError(f"{self.name}: {msg}")
+        return ModuleValidateError(f'{self.name}: {msg}')
 
     def validate(self) -> None:
         """Validates module config.
@@ -150,11 +137,11 @@
             ModuleValidateError: The module configuration is not valid.
         """
         if self.name is None:
-            raise ModuleValidateError(f"{self.__class__} has no name")
-        if self.install_path is None:
-            raise self.validate_error("install_path property not set")
+            raise ModuleValidateError(f'{self.__class__} has no name')
+        if self.path is None:
+            raise self.validate_error('path property not set')
         if self.notice_group not in NoticeGroup:
-            raise self.validate_error("invalid notice group")
+            raise self.validate_error('invalid notice group')
         self.validate_notice()
 
     def validate_notice(self) -> None:
@@ -167,12 +154,13 @@
             return
 
         if not self.notices:
-            raise self.validate_error("notice property not set")
+            raise self.validate_error('notice property not set')
         for notice in self.notices:
-            if not notice.exists():
-                raise self.validate_error(f"notice file {notice} does not exist")
+            if not os.path.exists(notice):
+                raise self.validate_error(
+                    f'notice file {notice} does not exist')
 
-    def get_dep(self, name: str) -> Module:
+    def get_dep(self, name: str) -> 'Module':
         """Returns the module object for the given dependency.
 
         Returns:
@@ -187,36 +175,46 @@
         assert self.context is not None
         return self.context.modules[name]
 
-    def get_build_host_install(self) -> Path:
+    def get_build_host_install(self,
+                               arch: Optional[ndk.abis.Arch] = None) -> str:
         """Returns the module's install path for the current host.
 
         In a cross-compiling context (i.e. building the Windows NDK from
         Linux), this will return the install directory for the build OS rather
         than the target OS.
 
+        Args:
+            arch: Architecture to fetch for architecture-specific modules.
+
         Returns:
             This module's install path for the build host.
         """
-        return self.get_install_path(Host.current())
+        return self.get_install_path(ndk.hosts.get_default_host(), arch)
 
     @property
-    def out_dir(self) -> Path:
+    def out_dir(self) -> str:
         """Base out directory for the current build."""
         assert self.context is not None
         return self.context.out_dir
 
     @property
-    def dist_dir(self) -> Path:
+    def dist_dir(self) -> str:
         """Base dist directory for the current build."""
         assert self.context is not None
         return self.context.dist_dir
 
     @property
-    def host(self) -> Host:
+    def host(self) -> ndk.hosts.Host:
         """Host for the current build."""
         assert self.context is not None
         return self.context.host
 
+    @property
+    def arches(self) -> List[ndk.abis.Arch]:
+        """Architectures targeted by the current build."""
+        assert self.context is not None
+        return self.context.arches
+
     def build(self) -> None:
         """Builds the module.
 
@@ -225,7 +223,7 @@
 
         The build phase should not modify the install directory.
         """
-        raise NotImplementedError(f"{self.name} didn't implement build().")
+        raise NotImplementedError
 
     def install(self) -> None:
         """Installs the module.
@@ -235,13 +233,39 @@
         The install phase should only copy files, not create them. Compilation
         should happen in the build phase.
         """
-        raise NotImplementedError(f"{self.name} didn't implement install().")
+        package_installs = ndk.packaging.expand_packages(
+            self.name, self.path, self.host, self.arches)
 
-    def get_install_path(self, host: Optional[Host] = None) -> Path:
+        install_base = ndk.paths.get_install_path(self.out_dir, self.host)
+        for package_name, package_install in package_installs:
+            assert self.context is not None
+            install_path = os.path.join(install_base, package_install)
+            package = os.path.join(self.context.dist_dir, package_name)
+            if os.path.exists(install_path):
+                shutil.rmtree(install_path)
+            ndk.packaging.extract_zip(package, install_path)
+
+    def get_install_paths(
+            self, host: ndk.hosts.Host,
+            arches: Optional[Iterable[ndk.abis.Arch]]) -> List[str]:
+        """Returns the install paths for the given archiectures."""
+        install_subdirs = ndk.packaging.expand_paths(self.path, host, arches)
+        install_base = ndk.paths.get_install_path(self.out_dir, host)
+        return [os.path.join(install_base, d) for d in install_subdirs]
+
+    def get_install_path(self, host: Optional[ndk.hosts.Host] = None,
+                         arch: Optional[ndk.abis.Arch] = None) -> str:
         """Returns the install path for the given module config.
 
+        For an architecture-independent module, there should only ever be one
+        install path.
+
+        For an architecture-dependent module, the optional arch argument must
+        be provided to select between the install paths.
+
         Args:
             host: The host to use for a host-specific install path.
+            arch: The architecture to use for an architecure-dependent module.
 
         Raises:
             ValueError: This is an architecture-dependent module and no
@@ -252,18 +276,45 @@
         if host is None:
             host = self.host
 
-        install_subdir = ndk.paths.expand_path(self.install_path, host)
-        install_base = ndk.paths.get_install_path(self.out_dir, host)
-        if self.intermediate_module:
-            install_base = self.intermediate_out_dir / "install"
-        return install_base / install_subdir
+        arch_dependent = False
+        if ndk.packaging.package_varies_by(self.path, 'abi'):
+            arch_dependent = True
+        elif ndk.packaging.package_varies_by(self.path, 'arch'):
+            arch_dependent = True
+        elif ndk.packaging.package_varies_by(self.path, 'toolchain'):
+            arch_dependent = True
+        elif ndk.packaging.package_varies_by(self.path, 'triple'):
+            arch_dependent = True
+
+        arches = None
+        if arch is not None:
+            arches = [arch]
+        elif self.build_arch is not None:
+            arches = [self.build_arch]
+        elif arch_dependent:
+            raise ValueError(
+                f'get_install_path for {arch} requires valid arch')
+
+        install_subdirs = self.get_install_paths(host, arches)
+
+        if len(install_subdirs) != 1:
+            raise RuntimeError(
+                'non-unique install path for single arch: ' + self.path)
+
+        return install_subdirs[0]
 
     @property
     def intermediate_out_dir(self) -> Path:
         """Path for intermediate outputs of this module."""
-        return self.out_dir / self.host.value / self.name
+        base_path = Path(self.out_dir) / self.host.value / self.name
+        if self.split_build_by_arch:
+            return base_path / self.build_arch
+        else:
+            return base_path
 
     def __str__(self) -> str:
+        if self.split_build_by_arch and self.build_arch is not None:
+            return f'{self.name} [{self.build_arch}]'
         return self.name
 
     def __hash__(self) -> int:
@@ -278,17 +329,21 @@
     @property
     def log_file(self) -> str:
         """Returns the basename of the log file for this module."""
-        return f"{self.name}.log"
+        if self.split_build_by_arch and self.build_arch is not None:
+            return f'{self.name}-{self.build_arch}.log'
+        elif self.split_build_by_arch:
+            raise RuntimeError('Called log_file on unsplit module')
+        else:
+            return f'{self.name}.log'
 
-    def log_path(self, log_dir: Path) -> Path:
+    def log_path(self, log_dir: str) -> str:
         """Returns the path to the log file for this module."""
-        return log_dir / self.log_file
+        return os.path.join(log_dir, self.log_file)
 
 
 class AutoconfModule(Module):
     # Path to the source code
     src: Path
-    env: Optional[Dict[str, str]] = None
 
     _builder: Optional[AutoconfBuilder] = None
 
@@ -297,11 +352,10 @@
         """Returns the lazily initialized builder for this module."""
         if self._builder is None:
             self._builder = AutoconfBuilder(
-                self.src / "configure",
+                self.src / 'configure',
                 self.intermediate_out_dir,
                 self.host,
-                additional_env=self.env,
-            )
+                use_clang=True)
         return self._builder
 
     @property
@@ -316,54 +370,10 @@
 
     def install(self) -> None:
         install_dir = self.get_install_path()
-        install_dir.mkdir(parents=True, exist_ok=True)
-        shutil.copytree(self.builder.install_directory, install_dir, dirs_exist_ok=True)
-
-
-class CMakeModule(Module):
-    # Path to the source code
-    src: Path
-    _builder: Optional[CMakeBuilder] = None
-    run_ctest: bool = False
-
-    @property
-    def builder(self) -> CMakeBuilder:
-        """Returns the lazily initialized builder for this module."""
-        if self._builder is None:
-            self._builder = CMakeBuilder(
-                self.src,
-                self.intermediate_out_dir,
-                self.host,
-                additional_flags=self.flags,
-                additional_ldflags=self.ldflags,
-                additional_env=self.env,
-                run_ctest=self.run_ctest,
-            )
-        return self._builder
-
-    @property
-    def env(self) -> Dict[str, str]:
-        return {}
-
-    @property
-    def flags(self) -> List[str]:
-        return []
-
-    @property
-    def ldflags(self) -> List[str]:
-        return []
-
-    @property
-    def defines(self) -> Dict[str, str]:
-        return {}
-
-    def build(self) -> None:
-        self.builder.build(self.defines)
-
-    def install(self) -> None:
-        install_dir = self.get_install_path()
-        install_dir.mkdir(parents=True, exist_ok=True)
-        shutil.copytree(self.builder.install_directory, install_dir, dirs_exist_ok=True)
+        ndk.ext.shutil.create_directory(install_dir)
+        copy_tree(
+            str(self.builder.install_directory),
+            str(install_dir))
 
 
 class PackageModule(Module):
@@ -373,24 +383,86 @@
     """
 
     #: The absolute path to the directory to be installed.
-    src: Path
+    src: str
 
-    def default_notice_path(self) -> Path:
-        return self.src / "NOTICE"
+    def default_notice_path(self) -> str:
+        return os.path.join(self.src, 'NOTICE')
+
+    def validate(self) -> None:
+        super().validate()
+
+        if ndk.packaging.package_varies_by(self.path, 'abi'):
+            raise self.validate_error(
+                'PackageModule cannot vary by abi')
+        if ndk.packaging.package_varies_by(self.path, 'arch'):
+            raise self.validate_error(
+                'PackageModule cannot vary by arch')
+        if ndk.packaging.package_varies_by(self.path, 'toolchain'):
+            raise self.validate_error(
+                'PackageModule cannot vary by toolchain')
+        if ndk.packaging.package_varies_by(self.path, 'triple'):
+            raise self.validate_error(
+                'PackageModule cannot vary by triple')
 
     def build(self) -> None:
         pass
 
     def install(self) -> None:
-        install_path = self.get_install_path(self.host)
+        install_paths = self.get_install_paths(self.host,
+                                               ndk.abis.ALL_ARCHITECTURES)
+        assert len(install_paths) == 1
+        install_path = install_paths[0]
         install_directory(self.src, install_path)
 
 
+class InvokeExternalBuildModule(Module):
+    """A module that uses a build.py script.
+
+    These are legacy modules that have not yet been properly merged into
+    checkbuild.py.
+    """
+
+    #: The path to the build script relative to the top of the source tree.
+    script: str
+
+    #: True if the module can be built in parallel per-architecture.
+    arch_specific = False
+
+    def build(self) -> None:
+        build_args = common_build_args(self.out_dir, self.dist_dir, self.host)
+        if self.split_build_by_arch:
+            build_args.append(f'--arch={self.build_arch}')
+        elif self.arch_specific and len(self.arches) == 1:
+            build_args.append(f'--arch={self.arches[0]}')
+        elif set(self.arches) == set(ndk.abis.ALL_ARCHITECTURES):
+            pass
+        else:
+            raise NotImplementedError(
+                f'Module {self.name} can only build all architectures or none')
+        script = self.get_script_path()
+        invoke_external_build(script, build_args)
+
+    def get_script_path(self) -> str:
+        """Returns the absolute path to the build script."""
+        return ndk.paths.android_path(self.script)
+
+
+class InvokeBuildModule(InvokeExternalBuildModule):
+    """A module that uses a build.py script within ndk/build/tools.
+
+    Identical to InvokeExternalBuildModule, but the script path is relative to
+    ndk/build/tools instead of the top of the source tree.
+    """
+
+    def get_script_path(self) -> str:
+        return ndk.paths.ndk_path('build/tools', self.script)
+
+
 class FileModule(Module):
     """A module that installs a single file to the NDK."""
 
     #: Path to the file to be installed.
-    src: Path
+    src: str
 
     #: True if no notice file is needed for this module.
     no_notice = True
@@ -399,9 +471,31 @@
         pass
 
     def install(self) -> None:
-        install_path = self.get_install_path()
-        install_path.parent.mkdir(parents=True, exist_ok=True)
-        shutil.copy2(self.src, install_path)
+        install_dir = self.get_install_path()
+        ndk.ext.shutil.create_directory(install_dir)
+        shutil.copy2(self.src, install_dir)
+
+
+class MultiFileModule(Module):
+    """A module that installs multiple files to the NDK.
+
+    This is similar to FileModule, but allows multiple files to be installed
+    with a single module.
+    """
+
+    @property
+    def files(self) -> Iterable[str]:
+        """List of absolute paths to files to be installed."""
+        return []
+
+    def build(self) -> None:
+        pass
+
+    def install(self) -> None:
+        install_dir = self.get_install_path()
+        ndk.ext.shutil.create_directory(install_dir)
+        for file_path in self.files:
+            shutil.copy2(file_path, install_dir)
 
 
 class ScriptShortcutModule(Module):
@@ -414,7 +508,7 @@
     """
 
     #: The path to the installed NDK script, relative to the top of the NDK.
-    script: Path
+    script: str
 
     #: The file extension for the called script on Windows.
     windows_ext: str
@@ -422,13 +516,24 @@
     # These are all trivial shell scripts that we generated. No notice needed.
     no_notice = True
 
-    disallow_windows_install_path_with_spaces: bool = False
-
     def validate(self) -> None:
         super().validate()
 
+        if ndk.packaging.package_varies_by(self.script, 'abi'):
+            raise self.validate_error(
+                'ScriptShortcutModule cannot vary by abi')
+        if ndk.packaging.package_varies_by(self.script, 'arch'):
+            raise self.validate_error(
+                'ScriptShortcutModule cannot vary by arch')
+        if ndk.packaging.package_varies_by(self.script, 'toolchain'):
+            raise self.validate_error(
+                'ScriptShortcutModule cannot vary by toolchain')
+        if ndk.packaging.package_varies_by(self.script, 'triple'):
+            raise self.validate_error(
+                'ScriptShortcutModule cannot vary by triple')
         if self.windows_ext is None:
-            raise self.validate_error("ScriptShortcutModule requires windows_ext")
+            raise self.validate_error(
+                'ScriptShortcutModule requires windows_ext')
 
     def build(self) -> None:
         pass
@@ -441,44 +546,39 @@
 
     def make_cmd_helper(self) -> None:
         """Makes a .cmd helper script for Windows."""
-        script = self.get_script_path().with_suffix(self.windows_ext)
+        script = self.get_script_path()
+        full_path = ntpath.join(
+            '%~dp0', ntpath.normpath(script) + self.windows_ext)
 
-        install_path = self.get_install_path().with_suffix(".cmd")
-        text = "@echo off\n"
-        if self.disallow_windows_install_path_with_spaces:
-            text += textwrap.dedent(
-                """\
-                rem https://stackoverflow.com/a/29057742/632035
-                for /f "tokens=2" %%a in ("%~dp0") do (
-                    echo ERROR: NDK path cannot contain spaces
-                    exit /b 1
-                )
-                """
-            )
-        text += f"%~dp0{PureWindowsPath(script)} %*"
-        install_path.write_text(text)
+        install_path = self.get_install_path() + '.cmd'
+        with open(os.path.join(install_path), 'w') as helper:
+            helper.writelines([
+                '@echo off\n',
+                full_path + ' %*\n',
+            ])
 
     def make_sh_helper(self) -> None:
         """Makes a bash helper script for POSIX systems."""
         script = self.get_script_path()
-        full_path = Path("$DIR") / script
+        full_path = os.path.join('$DIR', script)
 
         install_path = self.get_install_path()
-        install_path.write_text(
-            textwrap.dedent(
-                f"""\
-                #!/bin/sh
-                DIR=$(cd "$(dirname "$0")" && pwd)
-                "{full_path}" "$@"
-                """
-            )
-        )
-        mode = install_path.stat().st_mode
-        install_path.chmod(mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
+        with open(install_path, 'w') as helper:
+            helper.writelines([
+                '#!/bin/sh\n',
+                'DIR="$(cd "$(dirname "$0")" && pwd)"\n',
+                full_path + ' "$@"',
+            ])
+        mode = os.stat(install_path).st_mode
+        os.chmod(install_path,
+                 mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
 
-    def get_script_path(self) -> Path:
+    def get_script_path(self) -> str:
         """Returns the installed path of the script."""
-        return ndk.paths.expand_path(self.script, self.host)
+        scripts = ndk.packaging.expand_paths(
+            self.script, self.host, ndk.abis.ALL_ARCHITECTURES)
+        assert len(scripts) == 1
+        return scripts[0]
 
 
 class PythonPackage(Module):
@@ -491,162 +591,55 @@
     may be regnenerated using only artifacts from the build server.
     """
 
-    def default_notice_path(self) -> Path:
+    def default_notice_path(self) -> str:
         # Assume there's a NOTICE file in the same directory as the setup.py.
-        return self.install_path.parent / "NOTICE"
+        return os.path.join(os.path.dirname(self.path), 'NOTICE')
 
     def build(self) -> None:
+        cwd = os.path.dirname(self.path)
         subprocess.check_call(
-            ["python3", str(self.install_path), "sdist", "-d", self.out_dir],
-            cwd=self.install_path.parent,
-        )
+            ['python3', self.path, 'sdist', '-d', self.out_dir], cwd=cwd)
 
     def install(self) -> None:
         pass
 
 
-class PythonApplication(Module):
-    """A PEP 441 Python Zip Application.
+def invoke_external_build(script: str, args: List[str]) -> None:
+    """Invokes a build.py script rooted within the top level source tree.
 
-    https://peps.python.org/pep-0441/
-
-    A Python Zip Application is a zipfile of a Python package with an entry point that
-    is runnable by the Python interpreter. PythonApplication will create a the pyz
-    application with its bundled dependencies and a launcher script that will invoke it
-    using the NDK's bundled Python interpreter.
+    Args:
+        script: Path to the script to be executed within the top level source
+            tree.
+        args: Command line arguments to be passed to the script.
     """
-
-    package: Path
-    py_pkg_deps: list[Path] = []
-    copy_to_python_path: list[Path] = []
-    main: str
-
-    def build(self) -> None:
-        if self._staging.exists():
-            shutil.rmtree(self._staging)
-        self._staging.mkdir(parents=True)
-
-        if self.package.is_file():
-            shutil.copy(self.package, self._staging / self.package.name)
-            (self._staging / "__init__.py").touch()
-        else:
-            shutil.copytree(self.package, self._staging / self.package.name)
-
-        for path in self.copy_to_python_path:
-            if path.is_file():
-                shutil.copy(path, self._staging / path.name)
-            else:
-                shutil.copytree(path, self._staging / path.name)
-
-        # Ideally this would be `pip install` (and it once was), but new
-        # versions of pip don't support installing source packages without the
-        # `wheel` package (https://github.com/pypa/pip/issues/8368). `wheel`
-        # isn't part of the stdlib, and we can't fetch it from PyPI in CI, so we
-        # can't use pip to install it any more.
-        #
-        # The risk is that we can't account for any quirks of the package. This
-        # works for trivial packages, but anything with a complicated build
-        # would not be installed correctly. We might detect too many or too few
-        # sources, install packages to the wrong path, miss data files, etc.
-        #
-        # We might  be able to instead run the package's `setup.py install`
-        # directly, but that only works if the packages stick with setup.py and
-        # don't migrate to pyproject.toml, and I don't want to become a blocker
-        # for that.
-        for pkg_src in self.py_pkg_deps:
-            shutil.copytree(pkg_src, self._staging / pkg_src.name)
-
-        zipapp.create_archive(
-            source=self._staging,
-            target=self._pyz_build_location,
-            main=self.main,
-            filter=self.zipapp_file_filter,
-        )
-
-    @staticmethod
-    def zipapp_file_filter(path: Path) -> bool:
-        if ".git" in path.parts:
-            return False
-        if "__pycache__" in path.parts:
-            return False
-        if ".mypy_cache" in path.parts:
-            return False
-        if ".pytest_cache" in path.parts:
-            return False
-        if path.suffix in {".pyc", ".pyo"}:
-            return False
-        return True
-
-    def install(self) -> None:
-        install_path = self.get_install_path()
-        install_path.parent.mkdir(parents=True, exist_ok=True)
-        shutil.copy(self._pyz_build_location, install_path)
-        self.create_launcher()
-
-    def create_launcher(self) -> None:
-        if self.host is Host.Windows64:
-            self.create_cmd_launcher()
-        else:
-            self.create_bash_launcher()
-
-    def create_cmd_launcher(self) -> None:
-        self.get_install_path().with_name(f"{self.name}.cmd").write_text(
-            textwrap.dedent(
-                f"""\
-                @echo off
-                setlocal
-                set ANDROID_NDK_PYTHON=%~dp0..\\..\\..\\toolchains\\llvm\\prebuilt\\windows-x86_64\\python3\\python.exe
-                set SHELL=cmd
-                "%ANDROID_NDK_PYTHON%" -u "%~dp0{self.get_install_path().name}" %*
-                """
-            )
-        )
-
-    def create_bash_launcher(self) -> None:
-        launcher = self.get_install_path().with_name(self.name)
-        launcher.write_text(
-            textwrap.dedent(
-                f"""\
-                #!/usr/bin/env bash
-                THIS_DIR=$(cd "$(dirname "$0")" && pwd)
-                ANDROID_NDK_ROOT=$(cd "$THIS_DIR/../../.." && pwd)
-                . "$ANDROID_NDK_ROOT/build/tools/ndk_bin_common.sh"
-                "$ANDROID_NDK_PYTHON" "$THIS_DIR/{self.get_install_path().name}" "$@"
-                """
-            )
-        )
-        mode = launcher.stat().st_mode
-        launcher.chmod(mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
-
-    @property
-    def _staging(self) -> Path:
-        return self.intermediate_out_dir / self.name
-
-    @property
-    def _pyz_build_location(self) -> Path:
-        return self.intermediate_out_dir / self.get_install_path().name
+    subprocess.check_call(['python3', ndk.paths.android_path(script)] + args)
 
 
-class LintModule(Module):
-    def build(self) -> None:
-        self.run()
+def common_build_args(out_dir: str, dist_dir: str,
+                      host: ndk.hosts.Host) -> List[str]:
+    """Returns a list of common arguments for build.py scripts.
 
-    def install(self) -> None:
-        pass
+    Modules that have not been fully merged into checkbuild.py still use a
+    separately executed build.py script via InvokeBuildModule or
+    InvokeExternalBuildModule. These have a common command line interface for
+    determining out directories and target host.
 
-    def run(self) -> None:
-        raise NotImplementedError
+    Args:
+        out_dir: Base out directory for the target host.
+        dist_dir: Distribution directory for archived artifacts.
+        host: Target host.
+
+    Returns:
+        List of command line arguments to be used with build.py.
+    """
+    return [
+        f'--out-dir={os.path.join(out_dir, host.value)}',
+        f'--dist-dir={dist_dir}',
+        f'--host={host.value}',
+    ]
 
 
-class MetaModule(Module):
-    def build(self) -> None:
-        pass
-
-    def install(self) -> None:
-        pass
-
-
-def install_directory(src: Path, dst: Path) -> None:
+def install_directory(src: str, dst: str) -> None:
     """Copies a directory to an install location, ignoring some file types.
 
     The destination will be removed prior to copying if it exists, ensuring a
@@ -668,7 +661,44 @@
     # all these file types, and we should also do this before packaging since
     # packaging only runs when a full NDK is built (fine for the build servers,
     # could potentially be wrong for local testing).
-    if dst.exists():
+    if os.path.exists(dst):
         shutil.rmtree(dst)
-    ignore_patterns = shutil.ignore_patterns("*.pyc", "*.pyo", "*.swp", "*.git*")
+    ignore_patterns = shutil.ignore_patterns(
+        '*.pyc', '*.pyo', '*.swp', '*.git*')
     shutil.copytree(src, dst, ignore=ignore_patterns)
+
+
+def make_repo_prop(out_dir: str) -> None:
+    """Installs a repro.prop file to the given directory.
+
+    A repo.prop file is a file listing all of the git projects used and their
+    checked out revisions. i.e.
+
+        platform/bionic 40538268d43d82409a93637960f2da3c1226840a
+        platform/development 688f15246399db98897e660889d9a202559fe5d8
+        ...
+
+    Historically we installed one of these per "module" (from the attempted
+    modular NDK), but since the same information can be retrieved from the
+    build number we do not install them for most things now.
+
+    If this build is happening on the build server then there will be a
+    repo.prop file in the DIST_DIR for us to copy, otherwise we generate our
+    own.
+    """
+    # TODO: Finish removing users of this in favor of installing a single
+    # manifest.xml file in the root of the NDK.
+    file_name = 'repo.prop'
+
+    dist_dir = os.environ.get('DIST_DIR')
+    if dist_dir is not None:
+        dist_repo_prop = os.path.join(dist_dir, file_name)
+        shutil.copy(dist_repo_prop, out_dir)
+    else:
+        out_file = os.path.join(out_dir, file_name)
+        with open(out_file, 'w') as prop_file:
+            cmd = [
+                'repo', 'forall', '-c',
+                'echo $REPO_PROJECT $(git rev-parse HEAD)',
+            ]
+            subprocess.check_call(cmd, stdout=prop_file)
diff --git a/ndk/checkbuild.py b/ndk/checkbuild.py
index 44a32eb..2aa91b4 100755
--- a/ndk/checkbuild.py
+++ b/ndk/checkbuild.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2015 The Android Open Source Project
 #
@@ -14,295 +14,155 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Builds the NDK.
+"""Verifies that the build is sane.
 
 Cleans old build artifacts, configures the required environment, determines
 build goals, and invokes the build scripts.
 """
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
 import argparse
 import collections
 import contextlib
 import copy
+# pylint: disable=import-error,no-name-in-module
+# https://github.com/PyCQA/pylint/issues/73
+from distutils.dir_util import copy_tree
+# pylint: enable=import-error,no-name-in-module
+import glob
 import inspect
 import json
 import logging
 import multiprocessing
 import os
+from pathlib import Path
+import pipes
+import pprint
 import re
 import shutil
 import site
 import stat
 import subprocess
 import sys
+import tempfile
 import textwrap
 import traceback
-from collections.abc import Sequence
-from pathlib import Path
 from typing import (
     Any,
     Callable,
-    ContextManager,
     Dict,
     Iterable,
     Iterator,
     List,
+    Optional,
     Set,
     TextIO,
     Tuple,
+    Union,
 )
 
+from build.lib import build_support
 import ndk.abis
 import ndk.ansi
-import ndk.archive
 import ndk.autoconf
 import ndk.builds
-import ndk.cmake
 import ndk.config
 import ndk.deps
+import ndk.ext.shutil
+import ndk.file
+import ndk.hosts
 import ndk.notify
 import ndk.paths
 import ndk.test.builder
 import ndk.test.printers
 import ndk.test.spec
 import ndk.timer
+import ndk.toolchains
 import ndk.ui
 import ndk.workqueue
-from ndk.abis import ALL_ABIS, Abi
-from ndk.crtobjectbuilder import CrtObjectBuilder
-from ndk.hosts import Host
-from ndk.paths import ANDROID_DIR, NDK_DIR, PREBUILT_SYSROOT
-from ndk.platforms import ALL_API_LEVELS, API_LEVEL_ALIASES, MAX_API_LEVEL
-from ndk.toolchains import CLANG_VERSION, ClangToolchain
-
-from .ndkversionheadergenerator import NdkVersionHeaderGenerator
-from .pythonenv import ensure_python_environment
 
 
-def get_version_string(build_number: int) -> str:
-    """Returns the version string for the current build."""
-    return f"{ndk.config.major}.{ndk.config.hotfix}.{build_number}"
+def _make_tar_package(package_path: str, base_dir: str, path: str) -> str:
+    """Creates a tarball package for distribution.
+
+    Args:
+        package_path (string): Path (without extention) to the output archive.
+        base_dir (string): Path to the directory from which to perform the
+                           packaging (identical to tar's -C).
+        path (string): Path to the directory to package.
+    """
+    has_pbzip2 = shutil.which('pbzip2') is not None
+    if has_pbzip2:
+        compress_arg = '--use-compress-prog=pbzip2'
+    else:
+        compress_arg = '-j'
+
+    package_path = package_path + '.tar.bz2'
+    cmd = ['tar', compress_arg, '-cf', package_path, '-C', base_dir, path]
+    subprocess.check_call(cmd)
+    return package_path
+
+
+def _make_zip_package(package_path: str, base_dir: str, path: str) -> str:
+    """Creates a zip package for distribution.
+
+    Args:
+        package_path (string): Path (without extention) to the output archive.
+        base_dir (string): Path to the directory from which to perform the
+                           packaging (identical to tar's -C).
+        path (string): Path to the directory to package.
+    """
+    cwd = os.getcwd()
+    package_path = os.path.realpath(package_path) + '.zip'
+    os.chdir(base_dir)
+    try:
+        subprocess.check_call(['zip', '-9qr', package_path, path])
+        return package_path
+    finally:
+        os.chdir(cwd)
 
 
 def purge_unwanted_files(ndk_dir: Path) -> None:
     """Removes unwanted files from the NDK install path."""
 
-    for path in ndk.paths.walk(ndk_dir, directories=False):
-        if path.suffix == ".pyc":
-            path.unlink()
-        elif path.name == "Android.bp":
-            path.unlink()
+    for path, _dirs, files in os.walk(ndk_dir):
+        for file_name in files:
+            file_path = Path(path) / file_name
+            if file_name.endswith('.pyc'):
+                file_path.unlink()
+            elif file_name == 'Android.bp':
+                file_path.unlink()
 
 
-def make_symlink(src: Path, dest: Path) -> None:
-    src.unlink(missing_ok=True)
-    if dest.is_absolute():
-        src.symlink_to(Path(os.path.relpath(dest, src.parent)))
-    else:
-        src.symlink_to(dest)
-
-
-def create_stub_entry_point(path: Path) -> None:
-    """Creates a stub "application" for the app bundle.
-
-    App bundles must have at least one entry point in the Contents/MacOS
-    directory. We don't have a single entry point, and none of our executables
-    are useful if moved, so just put a welcome script in place that explains
-    that.
-    """
-    path.parent.mkdir(exist_ok=True, parents=True)
-    path.write_text(
-        textwrap.dedent(
-            """\
-            #!/bin/sh
-            echo "The Android NDK is installed to the Contents/NDK directory of this application bundle."
-            """
-        )
-    )
-    path.chmod(0o755)
-
-
-def create_plist(plist: Path, version: str, entry_point_name: str) -> None:
-    """Populates the NDK plist at the given location."""
-    plist.write_text(
-        textwrap.dedent(
-            f"""\
-            <?xml version="1.0" encoding="UTF-8"?>
-            <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-            <plist version="1.0">
-            <dict>
-                <key>CFBundleName</key>
-                <string>Android NDK</string>
-                <key>CFBundleDisplayName</key>
-                <string>Android NDK</string>
-                <key>CFBundleIdentifier</key>
-                <string>com.android.ndk</string>
-                <key>CFBundleVersion</key>
-                <string>{version}</string>
-                <key>CFBundlePackageType</key>
-                <string>APPL</string>
-                <key>CFBundleExecutable</key>
-                <string>{entry_point_name}</string>
-            </dict>
-            </plist>
-            """
-        )
-    )
-
-
-def create_signer_metadata(package_dir: Path) -> None:
-    """Populates the _codesign metadata directory for the ADRT signer.
-
-    Args:
-        package_dir: Path to the root of the directory that will be zipped for
-                     the signer.
-    """
-    metadata_dir = package_dir / "_codesign"
-    metadata_dir.mkdir()
-
-    # This directory can optionally contain a few pieces of metadata:
-    #
-    # filelist: For any jar files that need to be unpacked and signed. We have
-    # none.
-    #
-    # entitlements.xml: Defines any entitlements we need. No known, currently.
-    #
-    # volumename: The volume name for the DMG that the signer will create.
-    #
-    # See http://go/studio-signer for more information.
-
-    volumename_file = metadata_dir / "volumename"
-    volumename_file.write_text(f"Android NDK {ndk.config.release}")
-
-
-def make_app_bundle(
-    worker: ndk.workqueue.Worker,
-    zip_path: Path,
-    ndk_dir: Path,
-    build_number: int,
-    build_dir: Path,
-) -> None:
-    """Builds a macOS App Bundle of the NDK.
-
-    The NDK is distributed in two forms on macOS: as a app bundle and in the
-    traditional layout. The traditional layout is needed by the SDK because AGP
-    and Studio expect the NDK to be contained one directory down in the
-    archive, which is not compatible with macOS bundles. The app bundle is
-    needed on macOS because we rely on rpaths, and executables using rpaths are
-    blocked by Gate Keeper as of macOS Catalina (10.15), except for references
-    within the same bundle.
-
-    Information on the macOS bundle format can be found at
-    https://developer.apple.com/library/archive/documentation/CoreFoundation/Conceptual/CFBundles/BundleTypes/BundleTypes.html.
-
-    Args:
-        zip_path: The desired file path of the resultant zip file (without the
-                  extension).
-        ndk_dir: The path to the NDK being bundled.
-        build_dir: The path to the top level build directory.
-    """
-    worker.status = "Packaging MacOS App Bundle"
-    package_dir = build_dir / "bundle"
-    app_directory_name = f"AndroidNDK{build_number}.app"
-    bundle_dir = package_dir / app_directory_name
-    if package_dir.exists():
-        shutil.rmtree(package_dir)
-
-    contents_dir = bundle_dir / "Contents"
-    entry_point_name = "ndk"
-    create_stub_entry_point(contents_dir / "MacOS" / entry_point_name)
-
-    bundled_ndk = contents_dir / "NDK"
-    shutil.copytree(ndk_dir, bundled_ndk, symlinks=True)
-
-    plist = contents_dir / "Info.plist"
-    create_plist(plist, get_version_string(build_number), entry_point_name)
-
-    shutil.copy2(ndk_dir / "source.properties", package_dir / "source.properties")
-    create_signer_metadata(package_dir)
-    ndk.archive.make_zip(
-        zip_path,
-        package_dir,
-        [p.name for p in package_dir.iterdir()],
-        preserve_symlinks=True,
-    )
-
-
-def make_brtar(
-    worker: ndk.workqueue.Worker,
-    base_name: Path,
-    root_dir: Path,
-    base_dir: Path,
-    preserve_symlinks: bool,
-) -> None:
-    worker.status = "Packaging .tar.br"
-    ndk.archive.make_brtar(
-        base_name, root_dir, base_dir, preserve_symlinks=preserve_symlinks
-    )
-
-
-def make_zip(
-    worker: ndk.workqueue.Worker,
-    base_name: Path,
-    root_dir: Path,
-    paths: List[str],
-    preserve_symlinks: bool,
-) -> None:
-    worker.status = "Packaging .zip"
-    ndk.archive.make_zip(
-        base_name, root_dir, paths, preserve_symlinks=preserve_symlinks
-    )
-
-
-def package_ndk(
-    ndk_dir: Path, out_dir: Path, dist_dir: Path, host: Host, build_number: int
-) -> Path:
+def package_ndk(ndk_dir: str, dist_dir: str, host_tag: str,
+                build_number: str) -> str:
     """Packages the built NDK for distribution.
 
     Args:
-        ndk_dir: Path to the built NDK.
-        out_dir: Path to use for constructing any intermediate outputs.
-        dist_dir: Path to place the built package in.
-        host: Host the given NDK was built for.
-        build_number: Build number to use in the package name.
+        ndk_dir (string): Path to the built NDK.
+        dist_dir (string): Path to place the built package in.
+        host_tag (string): Host tag to use in the package name,
+        build_number (printable): Build number to use in the package name. Will
+                                  be 'dev' if the argument evaluates to False.
     """
-    package_name = f"android-ndk-{build_number}-{host.tag}"
-    package_path = dist_dir / package_name
+    package_name = 'android-ndk-{}-{}'.format(build_number, host_tag)
+    package_path = os.path.join(dist_dir, package_name)
 
-    purge_unwanted_files(ndk_dir)
+    purge_unwanted_files(Path(ndk_dir))
 
-    workqueue: ndk.workqueue.WorkQueue = ndk.workqueue.WorkQueue()
-    try:
-        if host == Host.Darwin:
-            workqueue.add_task(
-                make_app_bundle,
-                dist_dir / f"android-ndk-{build_number}-app-bundle",
-                ndk_dir,
-                build_number,
-                out_dir,
-            )
-        workqueue.add_task(
-            make_brtar,
-            package_path,
-            ndk_dir.parent,
-            Path(ndk_dir.name),
-            preserve_symlinks=(host != Host.Windows64),
-        )
-        workqueue.add_task(
-            make_zip,
-            package_path,
-            ndk_dir.parent,
-            [ndk_dir.name],
-            preserve_symlinks=(host != Host.Windows64),
-        )
-        ndk.ui.finish_workqueue_with_ui(workqueue, ndk.ui.get_build_progress_ui)
-    finally:
-        workqueue.terminate()
-        workqueue.join()
-    # TODO: Treat the .tar.br archive as authoritative and return its path.
-    return package_path.with_suffix(".zip")
+    base_dir = os.path.dirname(ndk_dir)
+    package_files = os.path.basename(ndk_dir)
+    if host_tag.startswith('windows'):
+        return _make_zip_package(package_path, base_dir, package_files)
+    else:
+        return _make_tar_package(package_path, base_dir, package_files)
 
 
-def build_ndk_tests(out_dir: Path, dist_dir: Path, args: argparse.Namespace) -> bool:
+def build_ndk_tests(out_dir: str, dist_dir: str,
+                    args: argparse.Namespace) -> bool:
     """Builds the NDK tests.
 
     Args:
@@ -317,121 +177,106 @@
     # packaging. This directory is not cleaned up after packaging, so we can
     # reuse that for testing.
     ndk_dir = ndk.paths.get_install_path(out_dir)
-    test_src_dir = ndk.paths.ndk_path("tests")
-    test_out_dir = out_dir / "tests"
+    test_src_dir = ndk.paths.ndk_path('tests')
+    test_out_dir = os.path.join(out_dir, 'tests')
 
-    site.addsitedir(str(ndk_dir / "python-packages"))
+    site.addsitedir(os.path.join(ndk_dir, 'python-packages'))
 
     test_options = ndk.test.spec.TestOptions(
-        test_src_dir,
-        ndk_dir,
-        test_out_dir,
-        clean=True,
-        package_path=(
-            Path(dist_dir).joinpath("ndk-tests") if args.package_tests else None
-        ),
-    )
+        test_src_dir, ndk_dir, test_out_dir, clean=True)
 
     printer = ndk.test.printers.StdoutPrinter()
+    with open(ndk.paths.ndk_path('qa_config.json')) as config_file:
+        test_config = json.load(config_file)
 
-    test_spec = ndk.test.spec.TestSpec.load(ndk.paths.ndk_path("qa_config.json"))
-    builder = ndk.test.builder.TestBuilder(test_spec, test_options, printer)
+    if args.arch is not None:
+        test_config['abis'] = ndk.abis.arch_to_abis(args.arch)
+
+    test_spec = ndk.test.builder.test_spec_from_config(test_config)
+    builder = ndk.test.builder.TestBuilder(
+        test_spec, test_options, printer)
 
     report = builder.build()
     printer.print_summary(report)
 
-    if not report.successful:
+    if report.successful:
+        print('Packaging tests...')
+        package_path = os.path.join(dist_dir, 'ndk-tests')
+        _make_tar_package(package_path, out_dir, 'tests/dist')
+    else:
         # Write out the result to logs/build_error.log so we can find the
         # failure easily on the build server.
-        log_path = dist_dir / "logs" / "build_error.log"
-        with log_path.open("a", encoding="utf-8") as error_log:
+        log_path = os.path.join(dist_dir, 'logs/build_error.log')
+        with open(log_path, 'a') as error_log:
             error_log_printer = ndk.test.printers.FilePrinter(error_log)
             error_log_printer.print_summary(report)
 
     return report.successful
 
 
-def install_file(file_name: str, src_dir: Path, dst_dir: Path) -> None:
-    src_file = src_dir / file_name
-    dst_file = dst_dir / file_name
+def install_file(file_name: str, src_dir: str, dst_dir: str) -> None:
+    src_file = os.path.join(src_dir, file_name)
+    dst_file = os.path.join(dst_dir, file_name)
 
-    print("Copying {} to {}...".format(src_file, dst_file))
-    if src_file.is_dir():
+    print('Copying {} to {}...'.format(src_file, dst_file))
+    if os.path.isdir(src_file):
         _install_dir(src_file, dst_file)
-    elif src_file.is_symlink():
+    elif os.path.islink(src_file):
         _install_symlink(src_file, dst_file)
     else:
         _install_file(src_file, dst_file)
 
 
-def _install_dir(src_dir: Path, dst_dir: Path) -> None:
-    parent_dir = dst_dir.parent
-    if not parent_dir.exists():
-        parent_dir.mkdir(parents=True)
+def _install_dir(src_dir: str, dst_dir: str) -> None:
+    parent_dir = os.path.normpath(os.path.join(dst_dir, '..'))
+    if not os.path.exists(parent_dir):
+        os.makedirs(parent_dir)
     shutil.copytree(src_dir, dst_dir, symlinks=True)
 
 
-def _install_symlink(src_file: Path, dst_file: Path) -> None:
-    dirname = dst_file.parent
-    if not dirname.exists():
-        dirname.mkdir(parents=True)
+def _install_symlink(src_file: str, dst_file: str) -> None:
+    dirname = os.path.dirname(dst_file)
+    if not os.path.exists(dirname):
+        os.makedirs(dirname)
     link_target = os.readlink(src_file)
     os.symlink(link_target, dst_file)
 
 
-def _install_file(src_file: Path, dst_file: Path) -> None:
-    dirname = dst_file.parent
-    if not dirname.exists():
-        dirname.mkdir(parents=True)
+def _install_file(src_file: str, dst_file: str) -> None:
+    dirname = os.path.dirname(dst_file)
+    if not os.path.exists(dirname):
+        os.makedirs(dirname)
     # copy2 is just copy followed by copystat (preserves file metadata).
     shutil.copy2(src_file, dst_file)
 
 
-ALL_MODULE_TYPES: list[type[ndk.builds.Module]] = []
-
-
-def register(module_class: type[ndk.builds.Module]) -> type[ndk.builds.Module]:
-    ALL_MODULE_TYPES.append(module_class)
-    return module_class
-
-
-@register
 class Clang(ndk.builds.Module):
-    name = "clang"
-    install_path = Path("toolchains/llvm/prebuilt/{host}")
+    name = 'clang'
+    path = 'toolchains/llvm/prebuilt/{host}'
     notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
 
     @property
-    def notices(self) -> Iterator[Path]:
-        # TODO: Inject Host before this runs and remove this hack.
-        # Just skip the license checking for dev builds. Without this the build
-        # will fail because there's only a clang-dev for one of the hosts.
-        if CLANG_VERSION == "clang-dev":
-            return
-        for host in Host:
-            yield ClangToolchain.path_for_host(host) / "NOTICE"
+    def notices(self) -> List[str]:
+        # TODO: Why every host?
+        return [
+            str(ndk.toolchains.ClangToolchain.path_for_host(h) / 'NOTICE')
+            for h in ndk.hosts.Host
+        ]
 
     def build(self) -> None:
         pass
 
     def install(self) -> None:
         install_path = self.get_install_path()
-        bin_dir = install_path / "bin"
 
-        if install_path.exists():
+        install_parent = os.path.dirname(install_path)
+        if os.path.exists(install_path):
             shutil.rmtree(install_path)
-        if not install_path.parent.exists():
-            install_path.parent.mkdir(parents=True)
+        if not os.path.exists(install_parent):
+            os.makedirs(install_parent)
         shutil.copytree(
-            ClangToolchain.path_for_host(self.host),
-            install_path,
-            symlinks=not self.host.is_windows,
-        )
-
-        # The prebuilt Linux Clangs include a bazel file for some other users.
-        # We don't need or test this interface so we shouldn't ship it.
-        if self.host is Host.Linux:
-            (install_path / "BUILD.bazel").unlink()
+            ndk.toolchains.ClangToolchain.path_for_host(self.host),
+            install_path)
 
         # clang-4053586 was patched in the prebuilts directory to add the
         # libc++ includes. These are almost certainly a different revision than
@@ -439,772 +284,1476 @@
         # and vice versa. Best to just remove them for the time being since
         # that returns to the previous behavior.
         # https://github.com/android-ndk/ndk/issues/564#issuecomment-342307128
-        shutil.rmtree(install_path / "include")
+        cxx_includes_path = os.path.join(install_path, 'include')
+        shutil.rmtree(cxx_includes_path)
 
-        if self.host is Host.Linux:
-            # The Linux toolchain wraps the compiler to inject some behavior
-            # for the platform. They aren't used for every platform and we want
-            # consistent behavior across platforms, and we also don't want the
-            # extra cost they incur (fork/exec is cheap, but CreateProcess is
-            # expensive), so remove them.
-            assert set(bin_dir.glob("*.real")) == {
-                bin_dir / "clang++.real",
-                bin_dir / "clang.real",
-                bin_dir / "clang-tidy.real",
-            }
-            (bin_dir / "clang++.real").unlink()
-            (bin_dir / "clang++").unlink()
-            (bin_dir / "clang-cl").unlink()
-            (bin_dir / "clang-tidy").unlink()
-            (bin_dir / "clang.real").rename(bin_dir / "clang")
-            (bin_dir / "clang-tidy.real").rename(bin_dir / "clang-tidy")
-            make_symlink(bin_dir / "clang++", Path("clang"))
+        if not self.host.is_windows:
+            # The Linux and Darwin toolchains have Python compiler wrappers
+            # that currently do nothing. We don't have these for Windows and we
+            # want to make sure Windows behavior is consistent with the other
+            # platforms, so just unwrap the compilers until they do something
+            # useful and are available on Windows.
+            os.rename(os.path.join(install_path, 'bin/clang.real'),
+                      os.path.join(install_path, 'bin/clang'))
+            os.rename(os.path.join(install_path, 'bin/clang++.real'),
+                      os.path.join(install_path, 'bin/clang++'))
 
-        bin_ext = ".exe" if self.host.is_windows else ""
-        if self.host.is_windows:
-            # Remove LLD duplicates. We only need ld.lld. For non-Windows these
-            # are all symlinks so we can keep them (and *need* to keep lld
-            # since that's the real binary).
-            # http://b/74250510
-            (bin_dir / f"ld64.lld{bin_ext}").unlink()
-            (bin_dir / f"lld{bin_ext}").unlink()
-            (bin_dir / f"lld-link{bin_ext}").unlink()
+            # The prebuilts have symlinks pointing at a clang-MAJ.MIN binary,
+            # but we replace symlinks with standalone copies, so remove this
+            # copy to save space.
+            bin_dir = os.path.join(install_path, 'bin')
+            (clang_maj_min,) = glob.glob(os.path.join(bin_dir, 'clang-?'))
+            os.remove(clang_maj_min)
 
-        install_clanglib = install_path / "lib/clang"
-        linux_prebuilt_path = ClangToolchain.path_for_host(Host.Linux)
+        # Remove LLD duplicates. We only need ld.lld.
+        # http://b/74250510
+        #
+        # Note that lld is experimental in the NDK. It is not the default for
+        # any architecture and has received only minimal testing in the NDK.
+        bin_ext = '.exe' if self.host.is_windows else ''
+        os.remove(os.path.join(install_path, 'bin/ld64.lld' + bin_ext))
+        os.remove(os.path.join(install_path, 'bin/lld' + bin_ext))
+        os.remove(os.path.join(install_path, 'bin/lld-link' + bin_ext))
 
-        # Remove unused python scripts. They are not installed for Windows.
-        if self.host != Host.Windows64:
-            python_bin_dir = install_path / "python3" / "bin"
-            python_files_to_remove = [
-                "2to3*",
-                "easy_install*",
-                "idle*",
-                "pip*",
-                "pydoc*",
-                "python*-config",
-            ]
-            for file_pattern in python_files_to_remove:
-                for pyfile in python_bin_dir.glob(file_pattern):
-                    pyfile.unlink()
+        # Remove LLDB before it is ready for use.
+        os.remove(os.path.join(install_path, 'bin/lldb' + bin_ext))
 
-        if self.host != Host.Linux:
-            # We don't build target binaries as part of the Darwin or Windows build.
-            # These toolchains need to get these from the Linux prebuilts.
+        install_clanglib = os.path.join(install_path, 'lib64', 'clang')
+        linux_prebuilt_path = ndk.toolchains.ClangToolchain.path_for_host(
+            ndk.hosts.Host.Linux)
+
+        if self.host != ndk.hosts.Host.Linux:
+            # We don't build target binaries as part of the Darwin or Windows
+            # build. These toolchains need to get these from the Linux
+            # prebuilts.
             #
-            # The headers and libraries we care about are all in lib/clang for both
-            # toolchains, and those two are intended to be identical between each host,
-            # so we can just replace them with the one from the Linux toolchain.
+            # The headers and libraries we care about are all in lib64/clang
+            # for both toolchains, and those two are intended to be identical
+            # between each host, so we can just replace them with the one from
+            # the Linux toolchain.
+            linux_clanglib = linux_prebuilt_path / 'lib64/clang'
             shutil.rmtree(install_clanglib)
-            shutil.copytree(
-                linux_prebuilt_path / "lib/clang",
-                install_clanglib,
-                symlinks=self.host is not Host.Windows64,
-            )
+            shutil.copytree(linux_clanglib, install_clanglib)
 
-        # The toolchain build creates a symlink to easy migration across versions in the
-        # platform build. It's just confusing and wasted space in the NDK. Purge it.
-        for path in install_clanglib.iterdir():
-            if path.is_symlink():
-                path.unlink()
-
-        # The Clang prebuilts have the platform toolchain libraries in lib/clang. The
-        # libraries we want are in runtimes_ndk_cxx.
-        ndk_runtimes = linux_prebuilt_path / "runtimes_ndk_cxx"
-        for version_dir in install_clanglib.iterdir():
-            dst_lib_dir = version_dir / "lib/linux"
+        # The Clang prebuilts have the platform toolchain libraries in
+        # lib64/clang. The libraries we want are in runtimes_ndk_cxx.
+        ndk_runtimes = linux_prebuilt_path / 'runtimes_ndk_cxx'
+        versions = os.listdir(install_clanglib)
+        for version in versions:
+            version_dir = os.path.join(install_clanglib, version)
+            dst_lib_dir = os.path.join(version_dir, 'lib/linux')
             shutil.rmtree(dst_lib_dir)
             shutil.copytree(ndk_runtimes, dst_lib_dir)
 
-            # Create empty libatomic.a stub libraries to keep -latomic working.
-            # This is needed for backwards compatibility and might be useful if
-            # upstream LLVM splits out the __atomic_* APIs from the builtins.
-            for arch in ndk.abis.ALL_ARCHITECTURES:
-                # Only the arch-specific subdir is on the linker search path.
-                subdir = {
-                    ndk.abis.Arch("arm"): "arm",
-                    ndk.abis.Arch("arm64"): "aarch64",
-                    ndk.abis.Arch("riscv64"): "riscv64",
-                    ndk.abis.Arch("x86"): "i386",
-                    ndk.abis.Arch("x86_64"): "x86_64",
-                }[arch]
-                (dst_lib_dir / subdir / "libatomic.a").write_text(
-                    textwrap.dedent(
-                        """\
-                    /* The __atomic_* APIs are now in libclang_rt.builtins-*.a. They might
-                       eventually be broken out into a separate library -- see llvm.org/D47606. */
-                    """
-                    )
-                )
-
-        # Remove duplicate install locations of some runtime libraries. The toolchain
-        # artifacts install these to a location the driver doesn't search. We relocate
-        # these as necessary (either in this class or in Toolchain), so clean up the
-        # excess. The Android runtimes are only packaged in the Linux toolchain.
-        if self.host == Host.Linux:
-            shutil.rmtree(install_path / "runtimes_ndk_cxx")
-            shutil.rmtree(install_path / "android_libc++")
+        # Also remove the other libraries that we installed, but they were only
+        # installed on Linux.
+        if self.host == ndk.hosts.Host.Linux:
+            shutil.rmtree(os.path.join(install_path, 'runtimes_ndk_cxx'))
 
         # Remove CMake package files that should not be exposed.
         # For some reason the LLVM install includes CMake modules that expose
         # its internal APIs. We want to purge these so apps don't accidentally
         # depend on them. See http://b/142327416 for more info.
-        shutil.rmtree(install_path / "lib/cmake")
-
-        # Remove libc++.a and libc++abi.a on Darwin. Now that these files are
-        # universal binaries, they break notarization. Maybe it is possible to
-        # fix notarization by using ditto to preserve APFS extended attributes.
-        # See https://developer.apple.com/forums/thread/126038.
-        if self.host == Host.Darwin:
-            (install_path / "lib/libc++.a").unlink()
-            (install_path / "lib/libc++abi.a").unlink()
-
-        # Strip some large binaries and libraries. This is awkward, hand-crafted
-        # logic to select most of the biggest offenders, but could be
-        # greatly improved, although handling Mac, Windows, and Linux
-        # elegantly and consistently is a bit tricky.
-        strip_cmd = ClangToolchain(Host.current()).strip
-        for file in ndk.paths.walk(bin_dir, directories=False):
-            if not file.is_file() or file.is_symlink():
-                continue
-            if Host.current().is_windows:
-                if file.suffix == ".exe":
-                    subprocess.check_call([str(strip_cmd), str(file)])
-            elif file.stat().st_size > 100000:
-                subprocess.check_call([str(strip_cmd), str(file)])
-        for file in ndk.paths.walk(install_clanglib, directories=False):
-            if not file.is_file() or file.is_symlink():
-                continue
-            if file.name == "lldb-server":
-                subprocess.check_call([str(strip_cmd), str(file)])
-            if file.name.startswith("libLTO.") or file.name.startswith("liblldb."):
-                subprocess.check_call([str(strip_cmd), "--strip-unneeded", str(file)])
-
-        # These exist for plugin support and library use, but neither of those
-        # are supported workflows for the NDK, so they're just dead weight.
-        #
-        # They don't exist on Windows though.
-        if self.host is not Host.Windows64:
-            lib_ext = ".dylib" if self.host is Host.Darwin else ".so"
-            (install_path / "lib" / "libclang").with_suffix(lib_ext).unlink()
-            (install_path / "lib" / "libclang-cpp").with_suffix(lib_ext).unlink()
-            (install_path / "lib" / "libLLVM").with_suffix(lib_ext).unlink()
-            (install_path / "lib" / "libLTO").with_suffix(lib_ext).unlink()
-            if self.host is Host.Linux:
-                for library in (install_path / "lib").glob("libLLVM-*"):
-                    library.unlink()
-
-        for lib in (install_path / "lib").iterdir():
-            broken_symlinks = {
-                "libc++abi.so.1.0",
-                "libc++abi.so",
-                "libc++.so.1.0",
-            }
-
-            if lib.name in broken_symlinks:
-                self._check_and_remove_dangling_symlink(lib)
-
-    def _check_and_remove_dangling_symlink(self, path: Path) -> None:
-        """Removes an expected dangling symlink, or raises an error.
-
-        The latest LLVM prebuilts have some dangling symlinks. It's a bug on the LLVM
-        build side, but rather than wait for a respin we just clean up the problems
-        here. This will raise an error whenever we upgrade to a new toolchain that
-        doesn't have these problems, so we'll know when to remove the workaround.
-        """
-        if not path.is_symlink():
-            raise RuntimeError(
-                f"Expected {path} to be a symlink. Update or remove this workaround."
-            )
-        if (dest := path.readlink()).exists():
-            raise RuntimeError(
-                f"Expected {path} to be a dangling symlink, but {dest} exists. Update "
-                "or remove this workaround."
-            )
-
-        path.unlink()
+        cmake_modules_dir = os.path.join(install_path, 'lib64', 'cmake')
+        shutil.rmtree(cmake_modules_dir)
 
 
-def versioned_so(host: Host, lib: str, version: str) -> str:
+def get_gcc_prebuilt_path(host: ndk.hosts.Host, arch: ndk.abis.Arch) -> str:
+    """Returns the path to the GCC prebuilt for the given host/arch."""
+    host_tag = ndk.hosts.host_to_tag(host)
+    toolchain = ndk.abis.arch_to_toolchain(arch) + '-4.9'
+    rel_prebuilt_path = os.path.join(
+        'prebuilts/ndk/current/toolchains', host_tag, toolchain)
+    prebuilt_path = ndk.paths.android_path(rel_prebuilt_path)
+    if not os.path.isdir(prebuilt_path):
+        raise RuntimeError(
+            'Could not find prebuilt GCC at {}'.format(prebuilt_path))
+    return prebuilt_path
+
+
+def get_binutils_prebuilt_path(host: ndk.hosts.Host,
+                               arch: ndk.abis.Arch) -> str:
+    if host == ndk.hosts.Host.Windows64:
+        host_dir_name = 'win64'
+    else:
+        host_dir_name = host.value
+
+    binutils_name = f'binutils-{arch}-{host_dir_name}'
+    prebuilt_path = ndk.paths.android_path('prebuilts/ndk', 'binutils',
+                                           host_dir_name, binutils_name)
+    if not os.path.isdir(prebuilt_path):
+        raise RuntimeError(
+            f'Could not find prebuilt binutils at {prebuilt_path}')
+    return prebuilt_path
+
+
+def versioned_so(host: ndk.hosts.Host, lib: str, version: str) -> str:
     """Returns the formatted versioned library for the given host.
 
-    >>> versioned_so(Host.Darwin, 'libfoo', '0')
+    >>> versioned_so(ndk.hosts.Host.Darwin, 'libfoo', '0')
     'libfoo.0.dylib'
-    >>> versioned_so(Host.Linux, 'libfoo', '0')
+    >>> versioned_so(ndk.hosts.Host.Linux, 'libfoo', '0')
     'libfoo.so.0'
     """
-    if host is Host.Darwin:
-        return f"{lib}.{version}.dylib"
-    if host is Host.Linux:
-        return f"{lib}.so.{version}"
-    raise ValueError(f"Unsupported host: {host}")
+    if host == ndk.hosts.Host.Darwin:
+        return f'{lib}.{version}.dylib'
+    elif host == ndk.hosts.Host.Linux:
+        return f'{lib}.so.{version}'
+    raise ValueError(f'Unsupported host: {host}')
 
 
-@register
-class ShaderTools(ndk.builds.CMakeModule):
-    name = "shader-tools"
-    src = ANDROID_DIR / "external" / "shaderc" / "shaderc"
-    install_path = Path("shader-tools/{host}")
-    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
-    deps = {"clang"}
-
-    @property
-    def notices(self) -> Iterator[Path]:
-        base = ANDROID_DIR / "external/shaderc"
-        shaderc_dir = base / "shaderc"
-        glslang_dir = base / "glslang"
-        spirv_dir = base / "spirv-headers"
-        yield shaderc_dir / "LICENSE"
-        yield shaderc_dir / "third_party/LICENSE.spirv-tools"
-        yield glslang_dir / "LICENSE.txt"
-        yield spirv_dir / "LICENSE"
-
-    @property
-    def defines(self) -> Dict[str, str]:
-        gtest_dir = ANDROID_DIR / "external" / "googletest"
-        effcee_dir = ANDROID_DIR / "external" / "effcee"
-        re2_dir = ANDROID_DIR / "external" / "regex-re2"
-        spirv_headers_dir = self.src.parent / "spirv-headers"
-        defines = {
-            "SHADERC_EFFCEE_DIR": str(effcee_dir),
-            "SHADERC_RE2_DIR": str(re2_dir),
-            "SHADERC_GOOGLE_TEST_DIR": str(gtest_dir),
-            "SHADERC_THIRD_PARTY_ROOT_DIR": str(self.src.parent),
-            "EFFCEE_GOOGLETEST_DIR": str(gtest_dir),
-            "EFFCEE_RE2_DIR": str(re2_dir),
-            # SPIRV-Tools tests require effcee and re2.
-            # Don't enable RE2 testing because it's long and not useful to us.
-            "RE2_BUILD_TESTING": "OFF",
-            "SPIRV-Headers_SOURCE_DIR": str(spirv_headers_dir),
-        }
-        return defines
-
-    @property
-    def flags(self) -> List[str]:
-        return super().flags + [
-            "-Wno-unused-command-line-argument",
-            "-fno-rtti",
-            "-fno-exceptions",
-        ]
-
-    @property
-    def ldflags(self) -> List[str]:
-        ldflags = super().ldflags
-        if self.host == Host.Linux:
-            # Our libc++.so.1 re-exports libc++abi, and it will be installed in
-            # the same directory as the executables.
-            ldflags += ["-Wl,-rpath,\\$ORIGIN"]
-        if self.host == Host.Windows64:
-            # TODO: The shaderc CMake files already pass these options for
-            # gcc+mingw but not for clang+mingw. See
-            # https://github.com/android/ndk/issues/1464.
-            ldflags += ["-static", "-static-libgcc", "-static-libstdc++"]
-        return ldflags
-
-    @property
-    def env(self) -> Dict[str, str]:
-        # Sets path for libc++, for ctest.
-        if self.host == Host.Linux:
-            return {"LD_LIBRARY_PATH": str(self._libcxx_dir)}
-        return {}
-
-    @property
-    def _libcxx_dir(self) -> Path:
-        return self.get_dep("clang").get_build_host_install() / "lib"
-
-    @property
-    def _libcxx(self) -> List[Path]:
-        path = self._libcxx_dir
-        if self.host == Host.Linux:
-            return [path / "libc++.so"]
-        return []
-
-    def build(self) -> None:
-        # These have never behaved properly on Darwin. Local builds haven't worked in
-        # years (presumably an XCode difference), and now CI is failing because of the
-        # same libc++ mismatch as in
-        # https://android-review.googlesource.com/c/platform/ndk/+/2657073. The local
-        # build fails before the failure that happens in CI, so I can't test a fix for
-        # the CI issue. Just disable this until someone that's familiar with the tests
-        # has the time to fix them.
-        self.run_ctest = self.host is not Host.Darwin
-        super().build()
-
-    def install(self) -> None:
-        self.get_install_path().mkdir(parents=True, exist_ok=True)
-        ext = ".exe" if self.host.is_windows else ""
-        files_to_copy = [
-            f"glslc{ext}",
-            f"spirv-as{ext}",
-            f"spirv-dis{ext}",
-            f"spirv-val{ext}",
-            f"spirv-cfg{ext}",
-            f"spirv-opt{ext}",
-            f"spirv-link{ext}",
-            f"spirv-reduce{ext}",
-        ]
-        scripts_to_copy = ["spirv-lesspipe.sh"]
-
-        # Copy to install tree.
-        for src in files_to_copy + scripts_to_copy:
-            shutil.copy2(
-                self.builder.install_directory / "bin" / src, self.get_install_path()
-            )
-
-        # Symlink libc++ to install path.
-        for lib in self._libcxx:
-            symlink_name = self.get_install_path() / lib.name
-            make_symlink(symlink_name, lib)
+def install_gcc_lib(install_path: str, host: ndk.hosts.Host,
+                    arch: ndk.abis.Arch, subarch: str, lib_subdir: str,
+                    libname: str) -> None:
+    gcc_prebuilt = get_gcc_prebuilt_path(host, arch)
+    lib_install_dir = os.path.join(install_path, lib_subdir, subarch)
+    if not os.path.exists(lib_install_dir):
+        os.makedirs(lib_install_dir)
+    shutil.copy2(
+        os.path.join(gcc_prebuilt, lib_subdir, subarch, libname),
+        os.path.join(lib_install_dir, libname))
 
 
-@register
-class Make(ndk.builds.CMakeModule):
-    name = "make"
-    install_path = Path("prebuilt/{host}")
-    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
-    src = ANDROID_DIR / "toolchain/make"
-    deps = {"clang"}
-
-    @property
-    def notices(self) -> Iterator[Path]:
-        yield self.src / "COPYING"
+def install_gcc_crtbegin(install_path: str, host: ndk.hosts.Host,
+                         arch: ndk.abis.Arch, subarch: str) -> None:
+    triple = ndk.abis.arch_to_triple(arch)
+    subdir = os.path.join('lib/gcc', triple, '4.9.x')
+    install_gcc_lib(install_path, host, arch, subarch, subdir, 'crtbegin.o')
 
 
-@register
-class Yasm(ndk.builds.AutoconfModule):
-    name = "yasm"
-    install_path = Path("prebuilt/{host}")
-    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
-    src = ANDROID_DIR / "toolchain/yasm"
+def install_libgcc(install_path: str,
+                   host: ndk.hosts.Host,
+                   arch: ndk.abis.Arch,
+                   subarch: str,
+                   new_layout: bool = False) -> None:
+    triple = ndk.abis.arch_to_triple(arch)
+    subdir = os.path.join('lib/gcc', triple, '4.9.x')
+    install_gcc_lib(install_path, host, arch, subarch, subdir, 'libgcc.a')
 
-    @property
-    def notices(self) -> Iterator[Path]:
-        files = [
-            "Artistic.txt",
-            "BSD.txt",
-            "COPYING",
-            "GNU_GPL-2.0",
-            "GNU_LGPL-2.0",
-        ]
-        for name in files:
-            yield self.src / name
+    if new_layout:
+        # For all architectures, we want to ensure that libcompiler_rt-extras
+        # is linked when libgcc is linked. Some day this will be entirely
+        # replaced by compiler-rt, but for now we are still dependent on libgcc
+        # but still need some things from compiler_rt-extras.
+        #
+        # For ARM32 we need to use LLVM's libunwind rather than libgcc.
+        # Unfortunately we still use libgcc for the compiler builtins, so we
+        # have to link both. To make sure that the LLVM unwinder gets used, add
+        # a linker script for libgcc to make sure that libunwind is placed
+        # first whenever libgcc is used. This also necessitates linking libdl
+        # since libunwind makes use of dl_iterate_phdr.
+        #
+        # Historically we dealt with this in the libc++ linker script, but
+        # since the new toolchain setup has the toolchain link the STL for us
+        # the correct way to use the static libc++ is to use
+        # `-static-libstdc++` which will expand to `-Bstatic -lc++ -Bshared`,
+        # which results in the static libdl being used. The stub implementation
+        # of libdl.a causes the unwind to fail, so we can't link libdl there.
+        # If we don't link it at all, linking fails when building a static
+        # executable since the driver does not link libdl when building a
+        # static executable.
+        #
+        # We only do this for the new toolchain layout since build systems
+        # using the legacy toolchain already needed to handle this, and
+        # -lunwind may not be valid in those configurations (it could have been
+        # linked by a full path instead).
+        libgcc_base_path = os.path.join(install_path, subdir, subarch)
+        libgcc_path = os.path.join(libgcc_base_path, 'libgcc.a')
+        libgcc_real_path = os.path.join(libgcc_base_path, 'libgcc_real.a')
+        shutil.move(libgcc_path, libgcc_real_path)
+        if arch == 'arm':
+            libs = '-lunwind -lcompiler_rt-extras -lgcc_real -ldl'
+        else:
+            libs = '-lcompiler_rt-extras -lgcc_real'
+        with open(libgcc_path, 'w') as script:
+            script.write('INPUT({})'.format(libs))
 
 
-@register
-class NdkWhich(ndk.builds.FileModule):
-    name = "ndk-which"
-    install_path = Path("prebuilt/{host}/bin/ndk-which")
-    src = NDK_DIR / "ndk-which"
+def install_libatomic(install_path: str, host: ndk.hosts.Host,
+                      arch: ndk.abis.Arch, subarch: str) -> None:
+    triple = ndk.abis.arch_to_triple(arch)
+    subdir = os.path.join(triple, 'lib64' if arch.endswith('64') else 'lib')
+    install_gcc_lib(install_path, host, arch, subarch, subdir, 'libatomic.a')
 
 
-def iter_python_lint_paths() -> Iterator[Path]:
-    ndk_package_path = Path("ndk")
-    yield ndk_package_path
-    for app in iter_python_app_modules():
-        if ndk_package_path not in app.package.parents:
-            yield app.package
+def get_subarches(arch: ndk.abis.Arch) -> List[str]:
+    if arch != ndk.abis.Arch('arm'):
+        return ['']
+
+    return [
+        '',
+        'thumb',
+        'armv7-a',
+        'armv7-a/thumb'
+    ]
 
 
-@register
-class Black(ndk.builds.LintModule):
-    name = "black"
-
-    def run(self) -> None:
-        if not shutil.which("black"):
-            logging.warning(
-                "Skipping format-checking. black was not found on your path."
-            )
-            return
-        subprocess.check_call(["black", "--check", "."])
-
-
-@register
-class Isort(ndk.builds.LintModule):
-    name = "isort"
-
-    def run(self) -> None:
-        if not shutil.which("isort"):
-            logging.warning("Skipping isort. isort was not found on your path.")
-            return
-        subprocess.check_call(["isort", "--check", "."])
-
-
-@register
-class Pylint(ndk.builds.LintModule):
-    name = "pylint"
-
-    def run(self) -> None:
-        if not shutil.which("pylint"):
-            logging.warning("Skipping linting. pylint was not found on your path.")
-            return
-        pylint: Sequence[str | Path] = [
-            "pylint",
-            "--rcfile=" + str(ANDROID_DIR / "ndk/pyproject.toml"),
-            "--score=n",
-            "build",
-            "tests",
-            *iter_python_lint_paths(),
-        ]
-        subprocess.check_call(pylint)
-
-
-@register
-class Mypy(ndk.builds.LintModule):
-    name = "mypy"
-
-    def run(self) -> None:
-        if not shutil.which("mypy"):
-            logging.warning("Skipping type-checking. mypy was not found on your path.")
-            return
-        subprocess.check_call(
-            [
-                "mypy",
-                "--config-file",
-                str(ANDROID_DIR / "ndk/pyproject.toml"),
-                *iter_python_lint_paths(),
-            ]
-        )
-
-
-@register
-class Pytest(ndk.builds.LintModule):
-    name = "pytest"
-    deps = {"ndk-stack", "ndk-stack-shortcut"}
-
-    def run(self) -> None:
-        if not shutil.which("pytest"):
-            logging.warning("Skipping pytest. pytest was not found on your path.")
-            return
-        subprocess.check_call(["pytest", "ndk", "tests/pytest"])
-
-
-@register
-class PythonLint(ndk.builds.MetaModule):
-    name = "pythonlint"
-    deps = {"black", "isort", "mypy", "pylint", "pytest"}
-
-
-@register
-class Toolbox(ndk.builds.Module):
-    name = "toolbox"
-    install_path = Path("prebuilt/{host}/bin")
-    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
-    notice = NDK_DIR / "sources/host-tools/toolbox/NOTICE"
-
-    def build_exe(self, src: Path, name: str) -> None:
-        toolchain = ClangToolchain(self.host)
-        cmd = [
-            str(toolchain.cc),
-            "-s",
-            "-o",
-            str(self.intermediate_out_dir / f"{name}.exe"),
-            str(src),
-        ] + toolchain.flags
-        subprocess.run(cmd, check=True)
-
-    def build(self) -> None:
-        if not self.host.is_windows:
-            print(f"Nothing to do for {self.host}")
-            return
-
-        self.intermediate_out_dir.mkdir(parents=True, exist_ok=True)
-
-        src_dir = NDK_DIR / "sources/host-tools/toolbox"
-        self.build_exe(src_dir / "echo_win.c", "echo")
-        self.build_exe(src_dir / "cmp_win.c", "cmp")
-
-    def install(self) -> None:
-        if not self.host.is_windows:
-            print(f"Nothing to do for {self.host}")
-            return
-
-        install_dir = self.get_install_path()
-        install_dir.mkdir(parents=True, exist_ok=True)
-
-        shutil.copy2(self.intermediate_out_dir / "echo.exe", install_dir)
-        shutil.copy2(self.intermediate_out_dir / "cmp.exe", install_dir)
-
-
-def install_exe(out_dir: Path, install_dir: Path, name: str, host: Host) -> None:
-    ext = ".exe" if host.is_windows else ""
-    exe_name = name + ext
-    src = out_dir / exe_name
-    dst = install_dir / exe_name
-
-    install_dir.mkdir(parents=True, exist_ok=True)
-    shutil.copy2(src, dst)
-
-
-def make_linker_script(path: Path, libs: List[str]) -> None:
-    path.write_text(f"INPUT({' '.join(libs)})\n")
-
-
-@register
-class LibShaderc(ndk.builds.Module):
-    name = "libshaderc"
-    install_path = Path("sources/third_party/shaderc")
-    src = ANDROID_DIR / "external/shaderc"
+class Binutils(ndk.builds.Module):
+    name = 'binutils'
+    path = 'toolchains/{toolchain}-4.9/prebuilt/{host}'
     notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
 
+    # TODO: Move GCC wrapper generation to Clang?
+    deps = {
+        'clang',
+    }
+
     @property
-    def notices(self) -> Iterator[Path]:
-        shaderc_dir = self.src / "shaderc"
-        glslang_dir = self.src / "glslang"
-        yield shaderc_dir / "LICENSE"
-        yield glslang_dir / "LICENSE.txt"
-        yield shaderc_dir / "third_party/LICENSE.spirv-tools"
+    def notices(self) -> List[str]:
+        notices = []
+        for host in ndk.hosts.ALL_HOSTS:
+            for arch in ndk.abis.ALL_ARCHITECTURES:
+                prebuilt_path = get_gcc_prebuilt_path(host, arch)
+                notices.append(os.path.join(prebuilt_path, 'NOTICE'))
+        return notices
 
     def build(self) -> None:
         pass
 
     def install(self) -> None:
+        for arch in self.arches:
+            self.install_arch(arch)
+
+    def install_arch(self, arch: ndk.abis.Arch) -> None:
+        install_path = self.get_install_path(arch=arch)
+        toolchain_path = get_binutils_prebuilt_path(self.host, arch)
+        ndk.builds.install_directory(toolchain_path, install_path)
+
+        # We still need libgcc/libatomic. Copy them from the old GCC prebuilts.
+        for subarch in get_subarches(arch):
+            install_libgcc(install_path, self.host, arch, subarch)
+            install_libatomic(install_path, self.host, arch, subarch)
+
+            # We don't actually want this, but Clang won't recognize a
+            # -gcc-toolchain without it.
+            install_gcc_crtbegin(install_path, self.host, arch, subarch)
+
+        # Copy the LLVMgold plugin into the binutils plugin directory so ar can
+        # use it.
+        if self.host == ndk.hosts.Host.Linux:
+            so = '.so'
+        elif self.host == ndk.hosts.Host.Darwin:
+            so = '.dylib'
+        else:
+            so = '.dll'
+
+        clang_prebuilts = self.get_dep('clang').get_install_path()
+        clang_bin = os.path.join(clang_prebuilts, 'bin')
+        clang_libs = os.path.join(clang_prebuilts, 'lib64')
+        llvmgold = os.path.join(clang_libs, 'LLVMgold' + so)
+
+        bfd_plugins = os.path.join(install_path, 'lib/bfd-plugins')
+        os.makedirs(bfd_plugins)
+        shutil.copy2(llvmgold, bfd_plugins)
+
+        if not self.host.is_windows:
+            libcxx_1 = os.path.join(
+                clang_libs, versioned_so(self.host, 'libc++', '1'))
+            libcxx_abi_1 = os.path.join(
+                clang_libs, versioned_so(self.host, 'libc++abi', '1'))
+
+            # The rpath on LLVMgold.so is ../lib64, so we have to install to
+            # lib/lib64 to have it be in the right place :(
+            lib_dir = os.path.join(install_path, 'lib/lib64')
+            os.makedirs(lib_dir)
+            shutil.copy2(libcxx_1, lib_dir)
+            shutil.copy2(libcxx_abi_1, lib_dir)
+        else:
+            libwinpthread = os.path.join(clang_bin, 'libwinpthread-1.dll')
+            shutil.copy2(libwinpthread, bfd_plugins)
+
+
+class ShaderTools(ndk.builds.InvokeBuildModule):
+    """Module that packages the shader tools by invoking build-shader-tools.py."""
+
+    name = 'shader-tools'
+    path = 'shader-tools/{host}'
+    script = 'build-shader-tools.py'
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+
+    @property
+    def notices(self) -> List[str]:
+        """Returns the license files for shaderc, spirv-tools, glslang, and SPIRV-Headers."""
+        base = ndk.paths.android_path('external/shaderc')
+        shaderc_dir = os.path.join(base, 'shaderc')
+        spirv_dir = os.path.join(base, 'spirv-headers')
+        return [
+            os.path.join(shaderc_dir, 'LICENSE'),
+            os.path.join(shaderc_dir, 'third_party', 'LICENSE.spirv-tools'),
+            os.path.join(shaderc_dir, 'third_party', 'LICENSE.glslang'),
+            os.path.join(spirv_dir, 'LICENSE')
+        ]
+
+
+class Make(ndk.builds.AutoconfModule):
+    """Module that builds GNU Make from toolchain/make via autoconf."""
+
+    name = 'make'
+    path = 'prebuilt/{host}'
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+    src: Path = ndk.paths.ANDROID_DIR / 'toolchain/make'
+
+    @property
+    def notices(self) -> List[str]:
+        """Returns the GNU Make license file."""
+        return [str(self.src / 'COPYING')]
+
+
+class Yasm(ndk.builds.AutoconfModule):
+    """Module that builds yasm from toolchain/yasm via autoconf."""
+
+    name = 'yasm'
+    path = 'prebuilt/{host}'
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+    src: Path = ndk.paths.ANDROID_DIR / 'toolchain/yasm'
+
+    @property
+    def notices(self) -> List[str]:
+        """Returns all of yasm's license files (it is multi-licensed)."""
+        files = ['Artistic.txt', 'BSD.txt', 'COPYING', 'GNU_GPL-2.0',
+                 'GNU_LGPL-2.0']
+        return [str(self.src / f) for f in files]
+
+
+class NdkWhich(ndk.builds.FileModule):
+    """Module that installs the ndk-which script as a single file."""
+
+    name = 'ndk-which'
+    path = 'prebuilt/{host}/bin'
+    src = ndk.paths.ndk_path('ndk-which')
+
+
+class HostTools(ndk.builds.Module):
+    """Module that builds and installs miscellaneous host tools (Python, toolbox)."""
+
+    name = 'host-tools'
+    path = 'prebuilt/{host}'
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+
+    @property
+    def notices(self) -> List[str]:
+        """Returns the licenses for the bundled Python and toolbox."""
+        return [
+            ndk.paths.android_path('toolchain/python/Python-2.7.5/LICENSE'),
+            ndk.paths.ndk_path('sources/host-tools/toolbox/NOTICE'),
+        ]
+
+    def build(self) -> None:
+        """Builds Python (all hosts) and toolbox (Windows only) via external build scripts."""
+        build_args = ndk.builds.common_build_args(self.out_dir, self.dist_dir,
+                                                  self.host)
+
+        if self.host.is_windows:
+            print('Building toolbox...')
+            ndk.builds.invoke_external_build(
+                'ndk/sources/host-tools/toolbox/build.py', build_args)
+
+        print('Building Python...')
+        ndk.builds.invoke_external_build(
+            'toolchain/python/build.py', build_args)
+
+    def install(self) -> None:
+        """Extracts the packages produced by build() into the install directory."""
+        install_dir = self.get_install_path()
+        ndk.ext.shutil.create_directory(install_dir)
+
+        packages = [
+            'ndk-python'
+        ]
+
+        if self.host.is_windows:
+            packages.append('toolbox')
+
+        host_tag = ndk.hosts.host_to_tag(self.host)
+
+        # The external build scripts emit <package>-<host_tag>.tar.bz2 archives.
+        package_names = [p + '-' + host_tag + '.tar.bz2' for p in packages]
+        for package_name in package_names:
+            package_path = os.path.join(self.out_dir, self.host.value,
+                                        package_name)
+            # --strip-components=1 drops the archive's top-level directory so
+            # the contents land directly in install_dir.
+            subprocess.check_call(
+                ['tar', 'xf', package_path, '-C', install_dir,
+                 '--strip-components=1'])
+
+
+def install_exe(out_dir: str, install_dir: str, name: str,
+                host: ndk.hosts.Host) -> None:
+    """Copies the named executable from out_dir to install_dir.
+
+    Appends .exe to the name for Windows hosts and creates install_dir if
+    needed.
+    """
+    ext = '.exe' if host.is_windows else ''
+    exe_name = name + ext
+    src = os.path.join(out_dir, exe_name)
+    dst = os.path.join(install_dir, exe_name)
+
+    ndk.ext.shutil.create_directory(install_dir)
+    shutil.copy2(src, dst)
+
+
+def make_linker_script(path: str, libs: List[str]) -> None:
+    """Writes a linker script at path that redirects to the given libraries."""
+    ndk.file.write_file(path, 'INPUT({})\n'.format(' '.join(libs)))
+
+
+def create_libcxx_linker_scripts(lib_dir: str, abi: ndk.abis.Abi,
+                                 api: int) -> None:
+    """Writes per-API libc++ linker scripts into lib_dir.
+
+    Creates libc++.a.<api> and libc++.so.<api> scripts that pull in the
+    support libraries needed at that API level: libandroid_support below
+    API 21, and libunwind/libatomic on armeabi-v7a.
+    """
+    static_libs = ['-lc++_static', '-lc++abi']
+    is_arm = abi == 'armeabi-v7a'
+    needs_android_support = api < 21
+    if needs_android_support:
+        static_libs.append('-landroid_support')
+    if is_arm:
+        static_libs.extend(['-lunwind', '-latomic'])
+    make_linker_script(
+        os.path.join(lib_dir, 'libc++.a.{}'.format(api)), static_libs)
+
+    shared_libs = []
+    if needs_android_support:
+        shared_libs.append('-landroid_support')
+    if is_arm:
+        shared_libs.extend(['-lunwind', '-latomic'])
+    # Note: for the shared case the support libs come before -lc++_shared.
+    shared_libs.append('-lc++_shared')
+    make_linker_script(
+        os.path.join(lib_dir, 'libc++.so.{}'.format(api)), shared_libs)
+
+
+class Libcxx(ndk.builds.Module):
+    """Module that builds libc++ with ndk-build and installs it with linker scripts."""
+
+    name = 'libc++'
+    path = 'sources/cxx-stl/llvm-libc++'
+    script = 'ndk/sources/cxx-stl/llvm-libc++/build.py'
+    notice = ndk.paths.android_path('external/libcxx/NOTICE')
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+    arch_specific = True
+    deps = {
+        'base-toolchain',
+        'libandroid_support',
+        'ndk-build',
+        'ndk-build-shortcut',
+    }
+
+    # Source checkout used for both the build inputs and the installed headers.
+    libcxx_path = ndk.paths.android_path('external/libcxx')
+
+    @property
+    def obj_out(self) -> str:
+        """Returns the ndk-build object output directory (NDK_OUT)."""
+        return os.path.join(self.out_dir, 'libcxx/obj')
+
+    @property
+    def lib_out(self) -> str:
+        """Returns the ndk-build library output directory (NDK_LIBS_OUT)."""
+        return os.path.join(self.out_dir, 'libcxx/libs')
+
+    @property
+    def abis(self) -> List[ndk.abis.Abi]:
+        """Returns all ABIs covered by this module's architectures."""
+        abis = []
+        for arch in self.arches:
+            abis.extend(ndk.abis.arch_to_abis(arch))
+        return abis
+
+    def build(self) -> None:
+        """Builds c++_shared and c++_static for every ABI via ndk-build."""
+        ndk_build = os.path.join(
+            self.get_dep('ndk-build').get_build_host_install(), 'ndk-build')
+        bionic_path = ndk.paths.android_path('bionic')
+
+        android_mk = os.path.join(self.libcxx_path, 'Android.mk')
+        application_mk = os.path.join(self.libcxx_path, 'Application.mk')
+
+        build_cmd = [
+            'bash', ndk_build, build_support.jobs_arg(), 'V=1',
+            'APP_ABI=' + ' '.join(self.abis),
+
+            # Since nothing in this build depends on libc++_static, we need to
+            # name it to force it to build.
+            'APP_MODULES=c++_shared c++_static',
+
+            'BIONIC_PATH=' + bionic_path,
+
+            # Tell ndk-build where all of our makefiles are and where outputs
+            # should go. The defaults in ndk-build are only valid if we have a
+            # typical ndk-build layout with a jni/{Android,Application}.mk.
+            'NDK_PROJECT_PATH=null',
+            'APP_BUILD_SCRIPT=' + android_mk,
+            'NDK_APPLICATION_MK=' + application_mk,
+            'NDK_OUT=' + self.obj_out,
+            'NDK_LIBS_OUT=' + self.lib_out,
+
+            # Make sure we don't pick up a cached copy.
+            'LIBCXX_FORCE_REBUILD=true',
+        ]
+
+        print('Running: ' + ' '.join([pipes.quote(arg) for arg in build_cmd]))
+        subprocess.check_call(build_cmd)
+
+    def install(self) -> None:
+        """Installs headers, libraries, and per-API linker scripts."""
+        install_root = self.get_install_path()
+
+        # Start from a clean tree so stale outputs never survive a reinstall.
+        if os.path.exists(install_root):
+            shutil.rmtree(install_root)
+        os.makedirs(install_root)
+
+        shutil.copy2(
+            os.path.join(self.libcxx_path, 'Android.mk'), install_root)
+        shutil.copy2(
+            os.path.join(self.libcxx_path, 'NOTICE'), install_root)
+        shutil.copytree(
+            os.path.join(self.libcxx_path, 'include'),
+            os.path.join(install_root, 'include'))
+        shutil.copytree(self.lib_out, os.path.join(install_root, 'libs'))
+
+        for abi in self.abis:
+            lib_dir = os.path.join(install_root, 'libs', abi)
+
+            # The static libraries are installed to the obj dir, not the lib
+            # dir, so copy them over separately.
+            self.install_static_libs(lib_dir, abi)
+
+            # Create linker scripts for the libraries we use so that we link
+            # things properly even when we're not using ndk-build. The linker
+            # will read the script in place of the library so that we link the
+            # unwinder and other support libraries appropriately.
+            platforms_meta = json.loads(
+                ndk.file.read_file(ndk.paths.ndk_path('meta/platforms.json')))
+            for api in range(platforms_meta['min'], platforms_meta['max'] + 1):
+                if api < ndk.abis.min_api_for_abi(abi):
+                    continue
+
+                create_libcxx_linker_scripts(lib_dir, abi, api)
+
+    def install_static_libs(self, lib_dir: str, abi: ndk.abis.Abi) -> None:
+        """Copies the static libraries for abi from the obj dir into lib_dir."""
+        static_lib_dir = os.path.join(self.obj_out, 'local', abi)
+
+        shutil.copy2(os.path.join(static_lib_dir, 'libc++abi.a'), lib_dir)
+        shutil.copy2(os.path.join(static_lib_dir, 'libc++_static.a'), lib_dir)
+
+        # armeabi-v7a additionally needs the unwinder.
+        if abi == 'armeabi-v7a':
+            shutil.copy2(os.path.join(static_lib_dir, 'libunwind.a'), lib_dir)
+
+        # 32-bit ABIs need libandroid_support for pre-21 libc gaps.
+        if abi in ndk.abis.LP32_ABIS:
+            shutil.copy2(
+                os.path.join(static_lib_dir, 'libandroid_support.a'), lib_dir)
+
+
+class Platforms(ndk.builds.Module):
+    """Module that builds CRT objects and assembles the NDK platforms tree.
+
+    build() compiles the crtbegin/crtend objects for every API/arch pair;
+    install() combines those objects with prebuilt platform libraries and
+    sysroot static libraries into the platforms/ directory layout.
+    """
+
+    name = 'platforms'
+    path = 'platforms'
+
+    deps = {
+        'clang',
+        'binutils',
+    }
+
+    min_supported_api = 16
+
+    # These API levels had no new native APIs. The contents of these platforms
+    # directories would be identical to the previous extant API level, so they
+    # are not included in the NDK to save space.
+    skip_apis = (20, 25)
+
+    # We still need a numeric API level for codenamed API levels because
+    # ABI_ANDROID_API in crtbrand is an integer. We start counting the
+    # codenamed releases from 9000 and increment for each additional release.
+    # This is filled by get_apis.
+    codename_api_map: Dict[str, int] = {}
+
+    # Shared with the sysroot, though the sysroot NOTICE actually includes a
+    # lot more licenses. Platforms and Sysroot are essentially a single
+    # component that is split into two directories only temporarily, so this
+    # will be the end state when we merge the two anyway.
+    notice = ndk.paths.android_path('prebuilts/ndk/platform/sysroot/NOTICE')
+
+    def prebuilt_path(self, *args: str) -> str:  # pylint: disable=no-self-use
+        """Returns a path under the prebuilt platform directory."""
+        return ndk.paths.android_path('prebuilts/ndk/platform', *args)
+
+    def src_path(self, *args: str) -> str:  # pylint: disable=no-self-use
+        """Returns a path under the development platforms source directory."""
+        return ndk.paths.android_path('development/ndk/platforms', *args)
+
+    def binutils_tool(self, tool: str, arch: ndk.abis.Arch) -> str:
+        """Returns the path to the triple-prefixed binutils tool for arch."""
+        triple = build_support.arch_to_triple(arch)
+        binutils = self.get_dep('binutils').get_build_host_install(arch)
+        return os.path.join(binutils, 'bin', triple + '-' + tool)
+
+    # pylint: disable=no-self-use
+    def libdir_name(self, arch: ndk.abis.Arch) -> str:
+        """Returns the library directory name for arch (lib64 only for x86_64)."""
+        if arch == 'x86_64':
+            return 'lib64'
+        else:
+            return 'lib'
+    # pylint: enable=no-self-use
+
+    def get_apis(self) -> List[Union[int, str]]:
+        """Returns the sorted list of API levels found in the prebuilts.
+
+        Numeric APIs below min_supported_api are dropped. Codenamed releases
+        are kept as strings and also assigned numeric IDs (from 9000) in
+        codename_api_map as a side effect.
+        """
+        codenamed_apis = []
+        apis: List[Union[int, str]] = []
+        for name in os.listdir(self.prebuilt_path('platforms')):
+            if not name.startswith('android-'):
+                continue
+
+            _, api_str = name.split('-')
+            try:
+                api = int(api_str)
+                if api >= self.min_supported_api:
+                    apis.append(api)
+            except ValueError:
+                # Codenamed release like android-O, android-O-MR1, etc.
+                # TODO: Remove this code path.
+                # I don't think we're actually using this. Since having
+                # non-integer API directories breaks all kinds of tools, we
+                # rename them when we check them in.
+                apis.append(api_str)
+                codenamed_apis.append(api_str)
+
+        for api_num, api_str in enumerate(sorted(codenamed_apis), start=9000):
+            self.codename_api_map[api_str] = api_num
+        return sorted(apis)
+
+    # pylint: disable=no-self-use
+    def get_arches(self, api: Union[int, str]) -> List[ndk.abis.Arch]:
+        """Returns the architectures supported at the given API level."""
+        arches = [ndk.abis.Arch('arm'), ndk.abis.Arch('x86')]
+        # All codenamed APIs are 64-bit capable; numeric APIs gained 64-bit
+        # support at 21.
+        if isinstance(api, str) or api >= 21:
+            arches.extend([ndk.abis.Arch('arm64'), ndk.abis.Arch('x86_64')])
+        return arches
+    # pylint: enable=no-self-use
+
+    def get_build_cmd(self, dst: str, srcs: List[str], api: int,
+                      arch: ndk.abis.Arch,
+                      build_number: Union[int, str]) -> List[str]:
+        """Returns the Clang command line for building a CRT object."""
+        bionic_includes = ndk.paths.android_path(
+            'bionic/libc/arch-common/bionic')
+
+        cc = os.path.join(
+            self.get_dep('clang').get_build_host_install(), 'bin/clang')
+        binutils = self.get_dep('binutils').get_build_host_install(arch)
+
+        args = [
+            cc,
+            '-target',
+            ndk.abis.clang_target(arch),
+            '--sysroot',
+            self.prebuilt_path('sysroot'),
+            '-gcc-toolchain',
+            binutils,
+            '-I',
+            bionic_includes,
+            '-D__ANDROID_API__={}'.format(api),
+            '-DPLATFORM_SDK_VERSION={}'.format(api),
+            '-DABI_NDK_VERSION="{}"'.format(ndk.config.release),
+            '-DABI_NDK_BUILD_NUMBER="{}"'.format(build_number),
+            '-O2',
+            '-fpic',
+            # -Wl,-r produces a relocatable (partially linked) object.
+            '-Wl,-r',
+            '-no-pie',
+            '-nostdlib',
+            '-Wa,--noexecstack',
+            '-Wl,-z,noexecstack',
+            '-o',
+            dst,
+        ] + srcs
+
+        return args
+
+    def check_elf_note(self, obj_file: str) -> None:
+        """Raises RuntimeError if obj_file lacks the Android ELF note."""
+        # readelf is a cross platform tool, so arch doesn't matter.
+        readelf = self.binutils_tool('readelf', ndk.abis.Arch('arm'))
+        out = subprocess.check_output([readelf, '--notes', obj_file])
+        if 'Android' not in out.decode('utf-8'):
+            raise RuntimeError(
+                '{} does not contain NDK ELF note'.format(obj_file))
+
+    def build_crt_object(self, dst: str, srcs: List[str], api: Union[int, str],
+                         arch: ndk.abis.Arch, build_number: Union[int, str],
+                         defines: List[str]) -> None:
+        """Compiles a single CRT object, resolving codenamed APIs to numbers."""
+        try:
+            # No-op for stable releases.
+            api_num = int(api)
+        except ValueError:
+            # ValueError means this was a codenamed release. We need the
+            # integer matching this release for ABI_ANDROID_API in crtbrand.
+            assert isinstance(api, str)
+            api_num = self.codename_api_map[api]
+
+        cc_args = self.get_build_cmd(dst, srcs, api_num, arch, build_number)
+        cc_args.extend(defines)
+
+        print('Running: ' + ' '.join([pipes.quote(arg) for arg in cc_args]))
+        subprocess.check_call(cc_args)
+
+    def build_crt_objects(self, dst_dir: str, api: Union[int, str],
+                          arch: ndk.abis.Arch,
+                          build_number: Union[int, str]) -> None:
+        """Builds all crtbegin/crtend variants for one API/arch into dst_dir."""
+        src_dir = ndk.paths.android_path('bionic/libc/arch-common/bionic')
+        crt_brand = ndk.paths.ndk_path('sources/crt/crtbrand.S')
+
+        # Maps output object name to its source files. crtbrand is linked into
+        # every crtbegin variant to stamp the NDK ELF note.
+        objects = {
+            'crtbegin_dynamic.o': [
+                os.path.join(src_dir, 'crtbegin.c'),
+                crt_brand,
+            ],
+            'crtbegin_so.o': [
+                os.path.join(src_dir, 'crtbegin_so.c'),
+                crt_brand,
+            ],
+            'crtbegin_static.o': [
+                os.path.join(src_dir, 'crtbegin.c'),
+                crt_brand,
+            ],
+            'crtend_android.o': [
+                os.path.join(src_dir, 'crtend.S'),
+            ],
+            'crtend_so.o': [
+                os.path.join(src_dir, 'crtend_so.S'),
+            ],
+        }
+
+        for name, srcs in objects.items():
+            dst_path = os.path.join(dst_dir, name)
+            defs = []
+            if name == 'crtbegin_static.o':
+                # libc.a is always the latest version, so ignore the API level
+                # setting for crtbegin_static.
+                defs.append('-D_FORCE_CRT_ATFORK')
+            self.build_crt_object(
+                dst_path, srcs, api, arch, build_number, defs)
+            if name.startswith('crtbegin'):
+                self.check_elf_note(dst_path)
+
+    def build(self) -> None:
+        """Builds the CRT objects for every non-skipped API and architecture."""
+        build_dir = os.path.join(self.out_dir, self.path)
+        if os.path.exists(build_dir):
+            shutil.rmtree(build_dir)
+
+        for api in self.get_apis():
+            if api in self.skip_apis:
+                continue
+
+            platform = 'android-{}'.format(api)
+            for arch in self.get_arches(api):
+                arch_name = 'arch-{}'.format(arch)
+                dst_dir = os.path.join(build_dir, platform, arch_name)
+                os.makedirs(dst_dir)
+                assert self.context is not None
+                self.build_crt_objects(dst_dir, api, arch,
+                                       self.context.build_number)
+
+    def install(self) -> None:
+        """Assembles the platforms install tree from prebuilts and built CRTs."""
+        build_dir = os.path.join(self.out_dir, self.path)
+        install_dir = self.get_install_path()
+
+        if os.path.exists(install_dir):
+            shutil.rmtree(install_dir)
+        os.makedirs(install_dir)
+
+        for api in self.get_apis():
+            if api in self.skip_apis:
+                continue
+
+            # Copy shared libraries from prebuilts/ndk/platform/platforms.
+            platform = 'android-{}'.format(api)
+            platform_src = self.prebuilt_path('platforms', platform)
+            platform_dst = os.path.join(install_dir, 'android-{}'.format(api))
+            shutil.copytree(platform_src, platform_dst)
+
+            for arch in self.get_arches(api):
+                arch_name = 'arch-{}'.format(arch)
+                triple = ndk.abis.arch_to_triple(arch)
+
+                # Install static libraries from prebuilts/ndk/platform/sysroot.
+                # TODO: Determine if we can change the build system to use the
+                # libraries directly from the sysroot directory rather than
+                # duplicating all the libraries in platforms.
+                lib_dir = self.prebuilt_path('sysroot/usr/lib', triple)
+                libdir_name = self.libdir_name(arch)
+                lib_dir_dst = os.path.join(
+                    install_dir, platform, arch_name, 'usr', libdir_name)
+                for name in os.listdir(lib_dir):
+                    lib_src = os.path.join(lib_dir, name)
+                    lib_dst = os.path.join(lib_dir_dst, name)
+                    shutil.copy2(lib_src, lib_dst)
+
+                if libdir_name == 'lib64':
+                    # The Clang driver won't accept a sysroot that contains
+                    # only a lib64. An empty lib dir is enough to convince it.
+                    os.makedirs(os.path.join(
+                        install_dir, platform, arch_name, 'usr/lib'))
+
+                # Install the CRT objects that we just built.
+                obj_dir = os.path.join(build_dir, platform, arch_name)
+                for name in os.listdir(obj_dir):
+                    obj_src = os.path.join(obj_dir, name)
+                    obj_dst = os.path.join(lib_dir_dst, name)
+                    shutil.copy2(obj_src, obj_dst)
+
+        # https://github.com/android-ndk/ndk/issues/372
+        for root, dirs, files in os.walk(install_dir):
+            if not files and not dirs:
+                with open(os.path.join(root, '.keep_dir'), 'w') as keep_file:
+                    keep_file.write(
+                        'This file forces git to keep the directory.')
+
+
+class Gdb(ndk.builds.Module):
+    """Module for multi-arch host GDB.
+
+    Note that the device side, gdbserver, is a separate module because it needs
+    to be cross compiled for all four Android ABIs.
+    """
+
+    name = 'gdb'
+    path = 'prebuilt/{host}'
+
+    deps = {
+        'make',
+        'host-tools',
+    }
+
+    GDB_VERSION = '8.3'
+
+    # Source checkouts for GDB and its statically linked dependencies.
+    expat_src = ndk.paths.ANDROID_DIR / 'toolchain/expat/expat-2.0.1'
+    lzma_src = ndk.paths.ANDROID_DIR / 'toolchain/xz'
+    gdb_src = ndk.paths.ANDROID_DIR / f'toolchain/gdb/gdb-{GDB_VERSION}'
+
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+
+    # Lazily constructed builders; see the corresponding properties below.
+    _expat_builder: Optional[ndk.autoconf.AutoconfBuilder] = None
+    _lzma_builder: Optional[ndk.autoconf.AutoconfBuilder] = None
+    _gdb_builder: Optional[ndk.autoconf.AutoconfBuilder] = None
+
+    @property
+    def notices(self) -> List[str]:
+        """Returns the licenses for expat, GDB, and liblzma."""
+        return [
+            str(self.expat_src / 'COPYING'),
+            str(self.gdb_src / 'COPYING'),
+            str(self.lzma_src / 'COPYING'),
+        ]
+
+    @property
+    def expat_builder(self) -> ndk.autoconf.AutoconfBuilder:
+        """Returns the lazily initialized expat builder for this module."""
+        if self._expat_builder is None:
+            self._expat_builder = ndk.autoconf.AutoconfBuilder(
+                self.expat_src / 'configure',
+                self.intermediate_out_dir / 'expat',
+                self.host,
+                use_clang=True)
+        return self._expat_builder
+
+    @property
+    def lzma_builder(self) -> ndk.autoconf.AutoconfBuilder:
+        """Returns the lazily initialized lzma builder for this module."""
+        if self._lzma_builder is None:
+            self._lzma_builder = ndk.autoconf.AutoconfBuilder(
+                self.lzma_src / 'configure',
+                self.intermediate_out_dir / 'lzma',
+                self.host,
+                add_toolchain_to_path=True,
+                use_clang=True)
+        return self._lzma_builder
+
+    @property
+    def gdb_builder(self) -> ndk.autoconf.AutoconfBuilder:
+        """Returns the lazily initialized gdb builder for this module."""
+        if self._gdb_builder is None:
+            no_build_or_host = False
+            no_strip = False
+            additional_flags = []
+            if self.host == ndk.hosts.Host.Darwin:
+                # Awful Darwin hack. For some reason GDB doesn't produce a gdb
+                # executable when using --build/--host.
+                no_build_or_host = True
+                # -s caused mysterious linking error on old macOS like:
+                # "ld: internal error: atom not found in symbolIndex(...)"
+                # Seems old ld64 wrongly stripped some symbols.
+                # Remove this when build server upgrades to xcode 7.3
+                # (ld64-264.3.101) or above.
+                no_strip = True
+                additional_flags.append('-Wl,-rpath,@loader_path/../lib')
+            if self.host == ndk.hosts.Host.Linux:
+                additional_flags.append('-Wl,-rpath,$$$$\\ORIGIN/../lib')
+            # Add path for libc++.
+            clang_path = ndk.toolchains.ClangToolchain.path_for_host(self.host)
+            additional_flags.append('-L' + str(clang_path / 'lib64'))
+            self._gdb_builder = ndk.autoconf.AutoconfBuilder(
+                self.gdb_src / 'configure',
+                self.intermediate_out_dir / 'gdb',
+                self.host,
+                use_clang=True,
+                no_build_or_host=no_build_or_host,
+                no_strip=no_strip,
+                additional_flags=additional_flags)
+        return self._gdb_builder
+
+    @property
+    def gdb_stub_install_path(self) -> Path:
+        """The gdb stub install path."""
+        return self.gdb_builder.install_directory / 'bin/gdb-stub'
+
+    def build_expat(self) -> None:
+        """Builds the expat dependency."""
+        self.expat_builder.build([
+            '--disable-shared',
+            '--enable-static',
+        ])
+
+    def build_lzma(self) -> None:
+        """Builds the liblzma dependency."""
+        self.lzma_builder.build([
+            '--disable-shared',
+            '--enable-static',
+            '--disable-xz',
+            '--disable-xzdec',
+            '--disable-lzmadev',
+            '--disable-scripts',
+            '--disable-doc',
+        ])
+
+    def build_gdb(self) -> None:
+        """Builds GDB itself."""
+        targets = ' '.join(ndk.abis.ALL_TRIPLES)
+        # TODO: Cleanup Python module so we don't need this explicit path.
+        python_config = (Path(
+            self.out_dir) / self.host.value / 'python' / ndk.hosts.host_to_tag(
+                self.host) / 'install/host-tools/bin/python-config.sh')
+        configure_args = [
+            '--with-expat',
+            f'--with-libexpat-prefix={self.expat_builder.install_directory}',
+            f'--with-python={python_config}',
+            f'--enable-targets={targets}',
+            '--disable-shared',
+            '--disable-werror',
+            '--disable-nls',
+            '--disable-docs',
+            '--without-mpc',
+            '--without-mpfr',
+            '--without-gmp',
+            '--without-isl',
+            '--disable-sim',
+            '--enable-gdbserver=no',
+        ]
+
+        configure_args.extend([
+            '--with-lzma',
+            f'--with-liblzma-prefix={self.lzma_builder.install_directory}',
+        ])
+
+        self.gdb_builder.build(configure_args)
+
+    def build_gdb_stub(self) -> None:
+        """Builds a gdb wrapper to setup PYTHONHOME.
+
+        We need to use gdb with the Python it was built against, so we need to
+        setup PYTHONHOME to point to the NDK's Python, not the host's.
+        """
+        if self.host.is_windows:
+            # TODO: Does it really need to be an executable?
+            # It's probably an executable because the original author wanted a
+            # .exe rather than a .cmd. Not sure how disruptive this change
+            # would be now. Presumably hardly at all because everyone needs to
+            # use ndk-gdb for reasonable behavior anyway?
+            self.build_exe_gdb_stub()
+        else:
+            self.build_sh_gdb_stub()
+
+    def build_exe_gdb_stub(self) -> None:
+        """Cross compiles the Windows gdb-stub executable with mingw."""
+        # Don't need to worry about extension here because it'll be renamed on
+        # install anyway.
+        gdb_stub_path = self.gdb_builder.install_directory / 'bin/gdb-stub'
+        stub_src = ndk.paths.NDK_DIR / 'sources/host-tools/gdb-stub/gdb-stub.c'
+        mingw_path = (ndk.paths.ANDROID_DIR /
+                      'prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/bin'
+                      / 'x86_64-w64-mingw32-gcc')
+
+        cmd = [
+            str(mingw_path),
+            '-O2',
+            '-s',
+            '-DNDEBUG',
+            str(stub_src),
+            '-o',
+            str(gdb_stub_path),
+        ]
+        pp_cmd = ' '.join([pipes.quote(arg) for arg in cmd])
+        print('Running: {}'.format(pp_cmd))
+        subprocess.run(cmd, check=True)
+
+    def build_sh_gdb_stub(self) -> None:
+        """Writes the POSIX shell gdb-stub wrapper and marks it executable."""
+        self.gdb_stub_install_path.write_text(
+            textwrap.dedent("""\
+            #!/bin/bash
+            GDBDIR=$(cd $(dirname $0) && pwd)
+            PYTHONHOME="$GDBDIR/.." "$GDBDIR/gdb-orig" "$@"
+            """))
+        self.gdb_stub_install_path.chmod(0o755)
+
+    def build(self) -> None:
+        """Builds GDB."""
+        if self.intermediate_out_dir.exists():
+            shutil.rmtree(self.intermediate_out_dir)
+
+        # Dependencies first: GDB's configure consumes their install dirs.
+        self.build_expat()
+        self.build_lzma()
+        self.build_gdb()
+        self.build_gdb_stub()
+
+    def install(self) -> None:
+        """Installs GDB."""
+        install_dir = Path(self.get_install_path())
+        copy_tree(
+            str(self.gdb_builder.install_directory / 'bin'),
+            str(install_dir / 'bin'))
+        gdb_share_dir = self.gdb_builder.install_directory / 'share/gdb'
+        gdb_share_install_dir = install_dir / 'share/gdb'
+        if gdb_share_install_dir.exists():
+            shutil.rmtree(gdb_share_install_dir)
+        shutil.copytree(gdb_share_dir, gdb_share_install_dir)
+
+        exe_suffix = '.exe' if self.host.is_windows else ''
+        gdb_exe = install_dir / ('bin/gdb' + exe_suffix)
+
+        # Stripping was skipped during the build (see no_strip in gdb_builder
+        # for Darwin). Strip the binary now.
+        if self.host == ndk.hosts.Host.Darwin:
+            cmd = [str(self.gdb_builder.toolchain.strip), str(gdb_exe)]
+            subprocess.check_call(cmd)
+
+        # gdb is currently gdb(.exe)? and the gdb stub is currently gdb-stub.
+        # Make them gdb-orig(.exe)? and gdb(.exe)? respectively.
+        gdb_exe.rename(install_dir / ('bin/gdb-orig' + exe_suffix))
+
+        gdb_stub = install_dir / 'bin/gdb-stub'
+        gdb_stub.rename(install_dir / ('bin/gdb' + exe_suffix))
+
+        # Install libc++.
+        clang_path = ndk.toolchains.ClangToolchain.path_for_host(self.host)
+        libcxx_files = {
+            ndk.hosts.Host.Darwin: ['libc++abi.1.dylib', 'libc++.1.dylib'],
+            ndk.hosts.Host.Linux: ['libc++abi.so.1', 'libc++.so.1'],
+            ndk.hosts.Host.Windows64: [],
+        }
+        for f in libcxx_files[self.host]:
+            shutil.copy(clang_path / 'lib64' / f, install_dir / 'lib')
+
+
+class GdbServer(ndk.builds.Module):
+    name = 'gdbserver'
+    path = 'prebuilt/android-{arch}/gdbserver'
+    notice = ndk.paths.android_path(
+        f'toolchain/gdb/gdb-{Gdb.GDB_VERSION}/gdb/COPYING')
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+    arch_specific = True
+    split_build_by_arch = True
+    deps = {
+        'toolchain',
+    }
+    max_api = Platforms().get_apis()[-1]
+
+    libthread_db_src_dir = ndk.paths.ndk_path('sources/android/libthread_db')
+    gdbserver_src_dir = ndk.paths.android_path(
+        f'toolchain/gdb/gdb-{Gdb.GDB_VERSION}/gdb/gdbserver')
+
+    @property
+    def build_dir(self) -> str:
+        """Returns the build directory for the current architecture."""
+        # build_arch is only set for split-by-arch builds; this property is
+        # invalid outside that context.
+        assert self.build_arch is not None
+        return os.path.join(self.out_dir, self.name, self.build_arch)
+
+    @property
+    def libthread_db_a_path(self) -> str:
+        """Returns the path to the built libthread_db.a."""
+        return os.path.join(self.build_dir, 'libthread_db.a')
+
+    def get_tool(self, name: str, arch: Optional[ndk.abis.Arch] = None) -> str:
+        """Returns the path to the given tool in the toolchain.
+
+        Args:
+            name: Name of the tool. e.g. 'ar'.
+            arch: Optional architecture for architecture specific tools.
+
+        Returns:
+            Path to the specified tool.
+        """
+        toolchain_bin = os.path.join(
+            self.get_dep('toolchain').get_build_host_install(), 'bin')
+
+        # Arch-specific tools are prefixed with the target triple, e.g.
+        # aarch64-linux-android-ar.
+        if arch is not None:
+            triple = ndk.abis.arch_to_triple(arch)
+            name = f'{triple}-{name}'
+        return os.path.join(toolchain_bin, name)
+
+    def build_libthread_db(self, api_level: int) -> None:
+        """Builds libthread_db.a for the current architecture.
+
+        Compiles libthread_db.c with Clang for the target API level and
+        archives the single object into libthread_db.a with a deterministic
+        archive ('D' flag), which gdbserver's configure later consumes via
+        --with-libthread-db.
+        """
+        assert self.build_arch is not None
+
+        libthread_db_c = os.path.join(self.libthread_db_src_dir,
+                                      'libthread_db.c')
+        libthread_db_o = os.path.join(self.build_dir, 'libthread_db.o')
+        cc_args = [
+            self.get_tool('clang'),
+            '-target',
+            ndk.abis.clang_target(self.build_arch, api_level),
+            '-I',
+            self.libthread_db_src_dir,
+            '-o',
+            libthread_db_o,
+            '-c',
+            libthread_db_c,
+        ]
+
+        print('Running: {}'.format(' '.join(
+            [pipes.quote(arg) for arg in cc_args])))
+        subprocess.run(cc_args, check=True)
+
+        ar_args = [
+            self.get_tool('ar', self.build_arch),
+            'rD',
+            self.libthread_db_a_path,
+            libthread_db_o,
+        ]
+
+        print('Running: {}'.format(' '.join(
+            [pipes.quote(arg) for arg in ar_args])))
+        subprocess.run(ar_args, check=True)
+
+    def configure(self, api_level: int) -> None:
+        """Configures the gdbserver build for the current architecture."""
+        assert self.build_arch is not None
+        # Autoconf --host triple per target architecture.
+        # NOTE(review): the arm/arm64 entries (arm-eabi-linux,
+        # aarch64-eabi-linux) differ from the usual Android triples --
+        # presumably matched to gdbserver's configure.tgt; verify before
+        # changing.
+        gdbserver_host = {
+            'arm': 'arm-eabi-linux',
+            'arm64': 'aarch64-eabi-linux',
+            'x86': 'i686-linux-android',
+            'x86_64': 'x86_64-linux-android',
+        }[self.build_arch]
+
+        cflags = ['-O2', '-I' + self.libthread_db_src_dir]
+        if self.build_arch.startswith('arm'):
+            cflags.append('-fno-short-enums')
+        if self.build_arch.endswith('64'):
+            cflags.append('-DUAPI_HEADERS')
+
+        # Static link so gdbserver runs on any device regardless of its libc.
+        ldflags = '-static -fuse-ld=gold -Wl,-z,nocopyreloc -Wl,--no-undefined'
+
+        # Use --target as part of CC so it is used when linking as well.
+        clang = '{} --target={}'.format(
+            self.get_tool('clang'),
+            ndk.abis.clang_target(self.build_arch, api_level))
+        clangplusplus = '{} --target={}'.format(
+            self.get_tool('clang++'),
+            ndk.abis.clang_target(self.build_arch, api_level))
+        configure_env = {
+            'CC': clang,
+            'CXX': clangplusplus,
+            'AR': self.get_tool('ar', self.build_arch),
+            'RANLIB': self.get_tool('ranlib', self.build_arch),
+            'CFLAGS': ' '.join(cflags),
+            'CXXFLAGS': ' '.join(cflags),
+            'LDFLAGS': ldflags,
+        }
+
+        configure_args = [
+            os.path.join(self.gdbserver_src_dir, 'configure'),
+            '--build=x86_64-linux-gnu',
+            f'--host={gdbserver_host}',
+            f'--with-libthread-db={self.libthread_db_a_path}',
+            '--disable-inprocess-agent',
+            '--enable-werror=no',
+        ]
+
+        # Extend (not replace) the environment so configure still sees PATH
+        # and friends.
+        subproc_env = dict(os.environ)
+        subproc_env.update(configure_env)
+        print('Running: {} with env:\n{}'.format(
+            ' '.join([pipes.quote(arg) for arg in configure_args]),
+            pprint.pformat(configure_env, indent=4)))
+        subprocess.run(configure_args, env=subproc_env, check=True)
+
+    def make(self) -> None:
+        """Runs make for the configured build."""
+        subprocess.run(['make', build_support.jobs_arg()], check=True)
+
+    def build(self) -> None:
+        """Builds gdbserver."""
+        if not os.path.exists(self.build_dir):
+            os.makedirs(self.build_dir)
+
+        max_api = Platforms().get_apis()[-1]
+        # Make sure the max_api is not a codenamed release. It should never
+        # happen since letters will sort before numbers.
+        assert isinstance(max_api, int)
+        with ndk.ext.os.cd(self.build_dir):
+            self.build_libthread_db(max_api)
+            self.configure(max_api)
+            self.make()
+
+    def install(self) -> None:
+        """Installs gdbserver."""
+        if os.path.exists(self.get_install_path()):
+            shutil.rmtree(self.get_install_path())
+        os.makedirs(self.get_install_path())
+
+        objcopy_args = [
+            self.get_tool('objcopy', self.build_arch),
+            '--strip-unneeded',
+            os.path.join(self.build_dir, 'gdbserver'),
+            os.path.join(self.get_install_path(), 'gdbserver'),
+        ]
+        subprocess.run(objcopy_args, check=True)
+
+
+class LibShaderc(ndk.builds.Module):
+    name = 'libshaderc'
+    path = 'sources/third_party/shaderc'
+    src = ndk.paths.android_path('external/shaderc')
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+
+    @property
+    def notices(self) -> List[str]:
+        shaderc_dir = os.path.join(self.src, 'shaderc')
+        return [
+            os.path.join(shaderc_dir, 'LICENSE'),
+            os.path.join(shaderc_dir, 'third_party', 'LICENSE.glslang'),
+            os.path.join(shaderc_dir, 'third_party', 'LICENSE.spirv-tools'),
+        ]
+
+    def build(self) -> None:
         copies = [
             {
-                "source_dir": str(self.src / "shaderc"),
-                "dest_dir": "",
-                "files": [
-                    "Android.mk",
-                    "libshaderc/Android.mk",
-                    "libshaderc_util/Android.mk",
-                    "third_party/Android.mk",
-                    "utils/update_build_version.py",
-                    "CHANGES",
+                'source_dir': os.path.join(self.src, 'shaderc'),
+                'dest_dir': 'shaderc',
+                'files': [
+                    'Android.mk', 'libshaderc/Android.mk',
+                    'libshaderc_util/Android.mk',
+                    'third_party/Android.mk',
+                    'utils/update_build_version.py',
+                    'CHANGES',
                 ],
-                "dirs": [
-                    "libshaderc/include",
-                    "libshaderc/src",
-                    "libshaderc_util/include",
-                    "libshaderc_util/src",
+                'dirs': [
+                    'libshaderc/include', 'libshaderc/src',
+                    'libshaderc_util/include', 'libshaderc_util/src',
                 ],
             },
             {
-                "source_dir": str(self.src / "spirv-tools"),
-                "dest_dir": "third_party/spirv-tools",
-                "files": [
-                    "utils/generate_grammar_tables.py",
-                    "utils/generate_language_headers.py",
-                    "utils/generate_registry_tables.py",
-                    "utils/update_build_version.py",
-                    "Android.mk",
-                    "CHANGES",
+                'source_dir': os.path.join(self.src, 'spirv-tools'),
+                'dest_dir': 'shaderc/third_party/spirv-tools',
+                'files': [
+                    'utils/generate_grammar_tables.py',
+                    'utils/generate_language_headers.py',
+                    'utils/generate_registry_tables.py',
+                    'utils/update_build_version.py',
+                    'Android.mk',
+                    'CHANGES',
                 ],
-                "dirs": ["include", "source"],
+                'dirs': ['include', 'source'],
             },
             {
-                "source_dir": str(self.src / "spirv-headers"),
-                "dest_dir": "third_party/spirv-tools/external/spirv-headers",
-                "dirs": ["include"],
-                "files": [
-                    "include/spirv/1.0/spirv.py",
-                    "include/spirv/1.1/spirv.py",
-                    "include/spirv/1.2/spirv.py",
-                    "include/spirv/uinified1/spirv.py",
+                'source_dir': os.path.join(self.src, 'spirv-headers'),
+                'dest_dir':
+                    'shaderc/third_party/spirv-tools/external/spirv-headers',
+                'dirs': ['include'],
+                'files': [
+                    'include/spirv/1.0/spirv.py',
+                    'include/spirv/1.1/spirv.py',
+                    'include/spirv/1.2/spirv.py',
+                    'include/spirv/unified1/spirv.py',
                 ],
             },
             {
-                "source_dir": str(self.src / "glslang"),
-                "dest_dir": "third_party/glslang",
-                "files": [
-                    "Android.mk",
-                    "glslang/OSDependent/osinclude.h",
-                    # Build version info is generated from the CHANGES.md file.
-                    "CHANGES.md",
-                    "build_info.h.tmpl",
-                    "build_info.py",
-                    "StandAlone/DirStackFileIncluder.h",
-                    "StandAlone/ResourceLimits.h",
-                ],
-                "dirs": [
-                    "SPIRV",
-                    "OGLCompilersDLL",
-                    "glslang/CInterface",
-                    "glslang/GenericCodeGen",
-                    "hlsl",
-                    "glslang/HLSL",
-                    "glslang/Include",
-                    "glslang/MachineIndependent",
-                    "glslang/OSDependent/Unix",
-                    "glslang/Public",
+                'source_dir': os.path.join(self.src, 'glslang'),
+                'dest_dir': 'shaderc/third_party/glslang',
+                'files': ['Android.mk', 'glslang/OSDependent/osinclude.h'],
+                'dirs': [
+                    'SPIRV',
+                    'OGLCompilersDLL',
+                    'glslang/GenericCodeGen',
+                    'hlsl',
+                    'glslang/Include',
+                    'glslang/MachineIndependent',
+                    'glslang/OSDependent/Unix',
+                    'glslang/Public',
                 ],
             },
         ]
 
         default_ignore_patterns = shutil.ignore_patterns(
-            "*CMakeLists.txt", "*.py", "*test.h", "*test.cc"
-        )
+            "*CMakeLists.txt",
+            "*.py",
+            "*test.h",
+            "*test.cc")
 
-        install_dir = self.get_install_path()
-        if install_dir.exists():
-            shutil.rmtree(install_dir)
+        temp_dir = tempfile.mkdtemp()
+        shaderc_path = os.path.join(temp_dir, 'shaderc')
+        try:
+            for properties in copies:
+                source_dir = properties['source_dir']
+                assert isinstance(source_dir, str)
+                assert isinstance(properties['dest_dir'], str)
+                dest_dir = os.path.join(temp_dir, properties['dest_dir'])
+                for d in properties['dirs']:
+                    assert isinstance(d, str)
+                    src = os.path.join(source_dir, d)
+                    dst = os.path.join(dest_dir, d)
+                    print(src, " -> ", dst)
+                    shutil.copytree(src, dst,
+                                    ignore=default_ignore_patterns)
+                for f in properties['files']:
+                    print(source_dir, ':', dest_dir, ":", f)
+                    # Only copy if the source file exists.  That way
+                    # we can update this script in anticipation of
+                    # source files yet-to-come.
+                    assert isinstance(f, str)
+                    if os.path.exists(os.path.join(source_dir, f)):
+                        install_file(f, source_dir, dest_dir)
+                    else:
+                        print(source_dir, ':', dest_dir, ":", f, "SKIPPED")
 
-        for properties in copies:
-            source_dir = properties["source_dir"]
-            assert isinstance(source_dir, str)
-            assert isinstance(properties["dest_dir"], str)
-            dest_dir = install_dir / properties["dest_dir"]
-            for d in properties["dirs"]:
-                assert isinstance(d, str)
-                src = Path(source_dir) / d
-                dst = Path(dest_dir) / d
-                print(src, " -> ", dst)
-                shutil.copytree(src, dst, ignore=default_ignore_patterns)
-            for f in properties["files"]:
-                print(source_dir, ":", dest_dir, ":", f)
-                # Only copy if the source file exists.  That way
-                # we can update this script in anticipation of
-                # source files yet-to-come.
-                assert isinstance(f, str)
-                if (Path(source_dir) / f).exists():
-                    install_file(f, Path(source_dir), Path(dest_dir))
-                else:
-                    print(source_dir, ":", dest_dir, ":", f, "SKIPPED")
+            build_support.make_package('libshaderc', shaderc_path,
+                                       self.dist_dir)
+        finally:
+            shutil.rmtree(temp_dir)
 
 
-@register
 class CpuFeatures(ndk.builds.PackageModule):
-    name = "cpufeatures"
-    install_path = Path("sources/android/cpufeatures")
-    src = NDK_DIR / "sources/android/cpufeatures"
+    name = 'cpufeatures'
+    path = 'sources/android/cpufeatures'
+    src = ndk.paths.ndk_path('sources/android/cpufeatures')
 
 
-@register
 class NativeAppGlue(ndk.builds.PackageModule):
-    name = "native_app_glue"
-    install_path = Path("sources/android/native_app_glue")
-    src = NDK_DIR / "sources/android/native_app_glue"
+    name = 'native_app_glue'
+    path = 'sources/android/native_app_glue'
+    src = ndk.paths.ndk_path('sources/android/native_app_glue')
 
 
-@register
 class NdkHelper(ndk.builds.PackageModule):
-    name = "ndk_helper"
-    install_path = Path("sources/android/ndk_helper")
-    src = NDK_DIR / "sources/android/ndk_helper"
+    name = 'ndk_helper'
+    path = 'sources/android/ndk_helper'
+    src = ndk.paths.ndk_path('sources/android/ndk_helper')
 
 
-@register
 class Gtest(ndk.builds.PackageModule):
-    name = "gtest"
-    install_path = Path("sources/third_party/googletest")
-    src = ANDROID_DIR / "external/googletest/googletest"
+    name = 'gtest'
+    path = 'sources/third_party/googletest'
+    src = ndk.paths.android_path('external/googletest/googletest')
 
     def install(self) -> None:
         super().install()
-        # Docs are moved to top level directory.
-        shutil.rmtree(self.get_install_path() / "docs")
+
+        # GTest renamed these files to be all lower case, but the SDK patcher
+        # doesn't handle that properly. Rename them back to the old names so
+        # the SDK patches apply properly.
+        # http://b/122741472
+        install_dir = self.get_install_path()
+        docs_dir = os.path.join(install_dir, 'docs')
+        rename_map = {
+            'faq.md': 'FAQ.md',
+            'primer.md': 'Primer.md',
+            'samples.md': 'Samples.md',
+        }
+        for rename_from, rename_to in rename_map.items():
+            os.rename(
+                os.path.join(docs_dir, rename_from),
+                os.path.join(docs_dir, rename_to))
 
 
-@register
 class Sysroot(ndk.builds.Module):
-    name = "sysroot"
-    install_path = Path("sysroot")
-    notice = PREBUILT_SYSROOT / "NOTICE"
-    intermediate_module = True
-    deps = {"clang"}
-
-    def __init__(self) -> None:
-        super().__init__()
-        self.crt_builder: CrtObjectBuilder | None = None
+    name = 'sysroot'
+    path = 'sysroot'
+    notice = ndk.paths.android_path('prebuilts/ndk/platform/sysroot/NOTICE')
 
     def build(self) -> None:
-        build_dir = self.out_dir / self.install_path
-        if build_dir.exists():
-            shutil.rmtree(build_dir)
+        temp_dir = tempfile.mkdtemp()
+        try:
+            path = ndk.paths.android_path('prebuilts/ndk/platform/sysroot')
+            install_path = os.path.join(temp_dir, 'sysroot')
+            shutil.copytree(path, install_path)
+            if self.host != 'linux':
+                # linux/netfilter has some headers with names that differ only
+                # by case, which can't be extracted to a case-insensitive
+                # filesystem, which are the defaults for Darwin and Windows :(
+                #
+                # There isn't really a good way to decide which of these to
+                # keep and which to remove. The capitalized versions expose
+                # different APIs, but we can't keep both. So far no one has
+                # filed bugs about needing either API, so let's just dedup them
+                # consistently and we can change that if we hear otherwise.
+                remove_paths = [
+                    'usr/include/linux/netfilter_ipv4/ipt_ECN.h',
+                    'usr/include/linux/netfilter_ipv4/ipt_TTL.h',
+                    'usr/include/linux/netfilter_ipv6/ip6t_HL.h',
+                    'usr/include/linux/netfilter/xt_CONNMARK.h',
+                    'usr/include/linux/netfilter/xt_DSCP.h',
+                    'usr/include/linux/netfilter/xt_MARK.h',
+                    'usr/include/linux/netfilter/xt_RATEEST.h',
+                    'usr/include/linux/netfilter/xt_TCPMSS.h',
+                ]
+                for remove_path in remove_paths:
+                    os.remove(os.path.join(install_path, remove_path))
 
-        assert self.context is not None
-        self.crt_builder = CrtObjectBuilder(
-            self.get_dep("clang").get_build_host_install(),
-            build_dir,
-            self.context.build_number,
-        )
-        self.crt_builder.build()
+            ndk_version_h_path = os.path.join(
+                install_path, 'usr/include/android/ndk-version.h')
+            with open(ndk_version_h_path, 'w') as ndk_version_h:
+                major = ndk.config.major
+                minor = ndk.config.hotfix
+                beta = ndk.config.beta
+                canary = '1' if ndk.config.canary else '0'
+                assert self.context is not None
+                build = self.context.build_number
+                if build == 'dev':
+                    build = '0'
 
-    def install(self) -> None:
-        install_path = self.get_install_path()
-        if install_path.exists():
-            shutil.rmtree(install_path)
-        shutil.copytree(PREBUILT_SYSROOT, install_path)
-        if self.host is not Host.Linux:
-            # linux/netfilter has some headers with names that differ only
-            # by case, which can't be extracted to a case-insensitive
-            # filesystem, which are the defaults for Darwin and Windows :(
-            #
-            # There isn't really a good way to decide which of these to
-            # keep and which to remove. The capitalized versions expose
-            # different APIs, but we can't keep both. So far no one has
-            # filed bugs about needing either API, so let's just dedup them
-            # consistently and we can change that if we hear otherwise.
-            remove_paths = [
-                "usr/include/linux/netfilter_ipv4/ipt_ECN.h",
-                "usr/include/linux/netfilter_ipv4/ipt_TTL.h",
-                "usr/include/linux/netfilter_ipv6/ip6t_HL.h",
-                "usr/include/linux/netfilter/xt_CONNMARK.h",
-                "usr/include/linux/netfilter/xt_DSCP.h",
-                "usr/include/linux/netfilter/xt_MARK.h",
-                "usr/include/linux/netfilter/xt_RATEEST.h",
-                "usr/include/linux/netfilter/xt_TCPMSS.h",
-            ]
-            for remove_path in remove_paths:
-                os.remove(install_path / remove_path)
+                ndk_version_h.write(textwrap.dedent("""\
+                    #ifndef ANDROID_NDK_VERSION_H
+                    #define ANDROID_NDK_VERSION_H
 
-        assert self.context is not None
-        NdkVersionHeaderGenerator(
-            ndk.config.major,
-            ndk.config.hotfix,
-            ndk.config.beta,
-            self.context.build_number,
-            ndk.config.canary,
-        ).write(install_path / "usr/include/android/ndk-version.h")
+                    /**
+                     * Major version of this NDK.
+                     *
+                     * For example: 16 for r16.
+                     */
+                    #define __NDK_MAJOR__ {major}
 
-        # Install the CRT objects that we just built.
-        assert self.crt_builder is not None
-        for abi, api, path in self.crt_builder.artifacts:
-            lib_dir_dst = (
-                install_path / "usr/lib" / ndk.abis.abi_to_triple(abi) / str(api)
-            )
-            obj_dst = lib_dir_dst / path.name
-            shutil.copy2(path, obj_dst)
+                    /**
+                     * Minor version of this NDK.
+                     *
+                     * For example: 0 for r16 and 1 for r16b.
+                     */
+                    #define __NDK_MINOR__ {minor}
+
+                    /**
+                     * Set to 0 if this is a release build, or 1 for beta 1,
+                     * 2 for beta 2, and so on.
+                     */
+                    #define __NDK_BETA__ {beta}
+
+                    /**
+                     * Build number for this NDK.
+                     *
+                     * For a local development build of the NDK, this is -1.
+                     */
+                    #define __NDK_BUILD__ {build}
+
+                    /**
+                     * Set to 1 if this is a canary build, 0 if not.
+                     */
+                    #define __NDK_CANARY__ {canary}
+
+                    #endif  /* ANDROID_NDK_VERSION_H */
+                    """.format(
+                        major=major,
+                        minor=minor,
+                        beta=beta,
+                        build=build,
+                        canary=canary)))
+
+            build_support.make_package('sysroot', install_path, self.dist_dir)
+        finally:
+            shutil.rmtree(temp_dir)
 
 
-def write_clang_shell_script(
-    wrapper_path: Path, clang_name: str, flags: List[str]
-) -> None:
-    wrapper_path.write_text(
-        textwrap.dedent(
-            f"""\
-            #!/usr/bin/env bash
-            bin_dir=`dirname "$0"`
+def write_clang_shell_script(wrapper_path: str, clang_name: str,
+                             flags: List[str]) -> None:
+    with open(wrapper_path, 'w') as wrapper:
+        wrapper.write(textwrap.dedent("""\
+            #!/bin/bash
             if [ "$1" != "-cc1" ]; then
-                "$bin_dir/{clang_name}" {' '.join(flags)} "$@"
+                `dirname $0`/{clang} {flags} "$@"
             else
                 # Target is already an argument.
-                "$bin_dir/{clang_name}" "$@"
+                `dirname $0`/{clang} "$@"
             fi
-            """
-        )
-    )
+        """.format(clang=clang_name, flags=' '.join(flags))))
 
     mode = os.stat(wrapper_path).st_mode
     os.chmod(wrapper_path, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
 
 
-def write_clang_batch_script(
-    wrapper_path: Path, clang_name: str, flags: List[str]
-) -> None:
-    wrapper_path.write_text(
-        textwrap.dedent(
-            f"""\
+def write_clang_batch_script(wrapper_path: str, clang_name: str,
+                             flags: List[str]) -> None:
+    with open(wrapper_path, 'w') as wrapper:
+        wrapper.write(textwrap.dedent("""\
             @echo off
             setlocal
             call :find_bin
-            if "%~1" == "-cc1" goto :L
+            if "%1" == "-cc1" goto :L
 
-            set "_BIN_DIR=" && "%_BIN_DIR%{clang_name}" {' '.join(flags)} %*
+            set "_BIN_DIR=" && %_BIN_DIR%{clang} {flags} %*
             if ERRORLEVEL 1 exit /b 1
             goto :done
 
             :L
             rem Target is already an argument.
-            set "_BIN_DIR=" && "%_BIN_DIR%{clang_name}" %*
+            set "_BIN_DIR=" && %_BIN_DIR%{clang} %*
             if ERRORLEVEL 1 exit /b 1
             goto :done
 
@@ -1215,14 +1764,11 @@
             exit /b
 
             :done
-            """
-        )
-    )
+        """.format(clang=clang_name, flags=' '.join(flags))))
 
 
-def write_clang_wrapper(
-    install_dir: Path, api: int, triple: str, is_windows: bool
-) -> None:
+def write_clang_wrapper(install_dir: str, api: int, triple: str,
+                        is_windows: bool) -> None:
     """Writes a target-specific Clang wrapper.
 
     This wrapper can be used to target the given architecture/API combination
@@ -1234,237 +1780,344 @@
     argv[0]), but the SDK manager can't install symlinks and Windows only
     allows administrators to create them.
     """
-    exe_suffix = ".exe" if is_windows else ""
+    exe_suffix = '.exe' if is_windows else ''
 
-    if triple.startswith("arm-linux"):
-        triple = "armv7a-linux-androideabi"
+    if triple.startswith('arm-linux'):
+        triple = 'armv7a-linux-androideabi'
 
-    wrapper_path = install_dir / "{}{}-clang".format(triple, api)
-    wrapperxx_path = wrapper_path.parent / (wrapper_path.name + "++")
+    wrapper_path = os.path.join(install_dir, '{}{}-clang'.format(triple, api))
+    wrapperxx_path = wrapper_path + '++'
 
-    flags = ["--target={}{}".format(triple, api)]
+    flags = ['--target={}{}'.format(triple, api)]
 
     # TODO: Hoist into the driver.
-    if triple.startswith("i686") and api < 24:
-        flags.append("-mstackrealign")
+    if triple.startswith('i686') and api < 24:
+        flags.append('-mstackrealign')
 
     # Write shell scripts even for Windows to support WSL and Cygwin.
-    write_clang_shell_script(wrapper_path, "clang" + exe_suffix, flags)
-    write_clang_shell_script(wrapperxx_path, "clang++" + exe_suffix, flags)
+    write_clang_shell_script(wrapper_path, 'clang' + exe_suffix, flags)
+    write_clang_shell_script(wrapperxx_path, 'clang++' + exe_suffix, flags)
     if is_windows:
-        write_clang_batch_script(
-            wrapper_path.with_suffix(".cmd"), "clang" + exe_suffix, flags
-        )
-        write_clang_batch_script(
-            wrapperxx_path.with_suffix(".cmd"), "clang++" + exe_suffix, flags
-        )
+        write_clang_batch_script(wrapper_path + '.cmd', 'clang' + exe_suffix,
+                                 flags)
+        write_clang_batch_script(wrapperxx_path + '.cmd',
+                                 'clang++' + exe_suffix, flags)
 
 
-@register
-class Toolchain(ndk.builds.Module):
-    """The LLVM toolchain.
+class BaseToolchain(ndk.builds.Module):
+    """The subset of the toolchain needed to build other toolchain components.
 
-    The toolchain includes Clang, LLD, libc++, and LLVM's binutils.
+    libc++ is built using this toolchain, and the full toolchain requires
+    libc++. The toolchain is split into BaseToolchain and Toolchain to break
+    the cyclic dependency.
     """
 
-    name = "toolchain"
+    name = 'base-toolchain'
     # This is installed to the Clang location to avoid migration pain.
-    install_path = Path("toolchains/llvm/prebuilt/{host}")
+    path = 'toolchains/llvm/prebuilt/{host}'
     notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
     deps = {
-        "clang",
-        "make",
-        "sysroot",
-        "system-stl",
-        "yasm",
+        'binutils',
+        'clang',
+        'libandroid_support',
+        'make',
+        'platforms',
+        'sysroot',
+        'system-stl',
+        'yasm',
     }
 
     @property
-    def notices(self) -> Iterator[Path]:
-        yield from Clang().notices
-        yield from Yasm().notices
-        yield from Sysroot().notices
-        yield from SystemStl().notices
-
-    @property
-    def sysroot_install_path(self) -> Path:
-        return self.get_install_path() / "sysroot"
-
-    def toolchain_libcxx_path_for(self, abi: Abi) -> Path:
-        """Returns the path to the toolchain's NDK libc++ artifacts.
-
-        The toolchain artifacts install all the libc++ artifacts to the android_libc++
-        subdirectory rather than anywhere that the driver can find them (because that's
-        still WIP). These are only included in the Linux artifacts.
-        """
-        # The libc++ directories in the toolchain artifacts use yet another spelling of
-        # each ABI.
-        libcxx_arch_name = {
-            Abi("armeabi-v7a"): "arm",
-            Abi("arm64-v8a"): "aarch64",
-            Abi("riscv64"): "riscv64",
-            Abi("x86"): "i386",
-            Abi("x86_64"): "x86_64",
-        }[abi]
-        return (
-            ClangToolchain.path_for_host(Host.Linux)
-            / "android_libc++/ndk"
-            / libcxx_arch_name
-        )
+    def notices(self) -> List[str]:
+        return (Binutils().notices + Clang().notices + Yasm().notices
+                + LibAndroidSupport().notices + Platforms().notices +
+                Sysroot().notices + SystemStl().notices)
 
     def build(self) -> None:
         pass
 
     def install(self) -> None:
         install_dir = self.get_install_path()
-        yasm_dir = self.get_dep("yasm").get_install_path()
-        sysroot_dir = self.get_dep("sysroot").get_install_path()
-        system_stl_dir = self.get_dep("system-stl").get_install_path()
+        yasm_dir = self.get_dep('yasm').get_install_path()
+        libandroid_support_dir = self.get_dep(
+            'libandroid_support').get_install_path()
+        platforms_dir = self.get_dep('platforms').get_install_path()
+        sysroot_dir = self.get_dep('sysroot').get_install_path()
+        system_stl_dir = self.get_dep('system-stl').get_install_path()
 
-        shutil.copytree(sysroot_dir, self.sysroot_install_path, dirs_exist_ok=True)
+        copy_tree(sysroot_dir, os.path.join(install_dir, 'sysroot'))
 
-        exe = ".exe" if self.host.is_windows else ""
+        exe = '.exe' if self.host.is_windows else ''
         shutil.copy2(
-            yasm_dir / "bin" / ("yasm" + exe),
-            install_dir / "bin",
-        )
+            os.path.join(yasm_dir, 'bin', 'yasm' + exe),
+            os.path.join(install_dir, 'bin'))
 
-        bin_dir = Path(install_dir) / "bin"
-        lld = bin_dir / f"ld.lld{exe}"
-        new_bin_ld = bin_dir / f"ld{exe}"
+        for arch in self.arches:
+            binutils_dir = self.get_dep('binutils').get_install_path(arch=arch)
+            copy_tree(binutils_dir, install_dir)
 
-        if self.host.is_windows:
-            shutil.copyfile(lld, new_bin_ld)
-            shutil.copystat(lld, new_bin_ld)
-        else:
-            # This reduces the size of the NDK by 60M on non-Windows.
-            os.symlink(lld.name, new_bin_ld)
+        platforms = self.get_dep('platforms')
+        assert isinstance(platforms, Platforms)
+        for api in platforms.get_apis():
+            if api in Platforms.skip_apis:
+                continue
 
-        for api in ALL_API_LEVELS:
-            for abi in ndk.abis.iter_abis_for_api(api):
-                triple = ndk.abis.abi_to_triple(abi)
+            platform = 'android-{}'.format(api)
+            for arch in platforms.get_arches(api):
+                triple = ndk.abis.arch_to_triple(arch)
+                arch_name = 'arch-{}'.format(arch)
+                lib_dir = 'lib64' if arch == 'x86_64' else 'lib'
+                src_dir = os.path.join(platforms_dir, platform, arch_name,
+                                       'usr', lib_dir)
+                dst_dir = os.path.join(install_dir, 'sysroot/usr/lib', triple,
+                                       str(api))
+                shutil.copytree(src_dir, dst_dir)
+                # TODO: Remove duplicate static libraries from this directory.
+                # We already have them in the version-generic directory.
+
+                assert isinstance(api, int)
                 write_clang_wrapper(
-                    install_dir / "bin", api, triple, self.host.is_windows
-                )
+                    os.path.join(install_dir, 'bin'), api, triple,
+                    self.host.is_windows)
 
         # Clang searches for libstdc++ headers at $GCC_PATH/../include/c++. It
         # maybe be worth adding a search for the same path within the usual
         # sysroot location to centralize these, or possibly just remove them
         # from the NDK since they aren't particularly useful anyway.
-        system_stl_hdr_dir = install_dir / "include/c++"
-        system_stl_hdr_dir.mkdir(parents=True)
-        system_stl_inc_src = system_stl_dir / "include"
-        system_stl_inc_dst = system_stl_hdr_dir / "4.9.x"
+        system_stl_hdr_dir = os.path.join(install_dir, 'include/c++')
+        os.makedirs(system_stl_hdr_dir)
+        system_stl_inc_src = os.path.join(system_stl_dir, 'include')
+        system_stl_inc_dst = os.path.join(system_stl_hdr_dir, '4.9.x')
         shutil.copytree(system_stl_inc_src, system_stl_inc_dst)
-        self.relocate_libcxx()
-        self.create_libcxx_linker_scripts()
 
-    def relocate_libcxx(self) -> None:
-        """Relocate libc++ so its discoverable by the Clang driver.
+        # $SYSROOT/usr/local/include comes before $SYSROOT/usr/include, so we
+        # can use that for libandroid_support's headers. Puting them here
+        # *does* mean that libandroid_support's headers get used even when
+        # we're not using libandroid_support, but they should be a no-op for
+        # android-21+ and in the case of pre-21 without libandroid_support
+        # (libstdc++), we're only degrading the UX; the user will get a linker
+        # error instead of a compiler error.
+        support_hdr_dir = os.path.join(install_dir, 'sysroot/usr/local')
+        os.makedirs(support_hdr_dir)
+        support_inc_src = os.path.join(libandroid_support_dir, 'include')
+        support_inc_dst = os.path.join(support_hdr_dir, 'include')
+        shutil.copytree(support_inc_src, support_inc_dst)
 
-        The NDK libc++ in the toolchain prebuilts is not installed to a location that
-        the driver is able to find by default. Move it to a driver searched directory.
-        """
-        # The Clang driver automatically uses the following library search directories
-        # (relative to the LLVM install root, for an aarch64-linux-android21 target and
-        # LLVM 17):
-        #
-        # 1. lib/clang/17/lib/linux/aarch64
-        # 2. bin/../sysroot/usr/lib/aarch64-linux-android/21
-        # 3. bin/../sysroot/usr/lib/aarch64-linux-android
-        # 4. bin/../sysroot/usr/lib
-        #
-        # The sysroot directory comes from the platform's sysroot artifact, so it's best
-        # to avoid installing to that (if we install there, the platform's artifact
-        # can't be used directly; it needs to have NDK components installed to it).
-        #
-        # However, AGP (and probably other systems) expect to find libc++_shared.so in
-        # sysroot/usr/lib/$TRIPLE, so we should continue using that path for the time
-        # being. At some point we should move all the libc++ details into the
-        # toolchain's directories so it's easier to use an arbitrary sysroot (e.g. for
-        # previewing Android APIs without needing a whole new NDK), but we can't do that
-        # for the headers yet anyway (see below). Keep compatible for now.
-        usr_lib = self.sysroot_install_path / "usr/lib"
-        for abi in ALL_ABIS:
-            dest = usr_lib / ndk.abis.abi_to_triple(abi)
-            src = self.toolchain_libcxx_path_for(abi) / "lib"
-            for lib in src.iterdir():
-                shutil.copy2(lib, dest / lib.name)
 
-        # libc++ headers for Android will currently only be found in the sysroot:
-        # https://github.com/llvm/llvm-project/blob/c64f10bfe20308ebc7d5d18912cd0ba82a44eaa1/clang/lib/Driver/ToolChains/Gnu.cpp#L3080-L3084
-        #
-        # We ought to revert that driver behavior (which shouldn't be contentious, since
-        # it's our patch in the first place), but for now we'll continue installing the
-        # libc++ headers to the sysroot.
-        src = ClangToolchain.path_for_host(Host.Linux) / "include/c++/v1"
-        dest = self.sysroot_install_path / "usr/include/c++/v1"
-        if dest.exists():
-            shutil.rmtree(dest)
-        dest.parent.mkdir(parents=True, exist_ok=True)
-        shutil.copytree(src, dest)
+class Vulkan(ndk.builds.Module):
+    name = 'vulkan'
+    path = 'sources/third_party/vulkan'
 
-        # There's also an Android-specific __config_site header that we need to install.
-        shutil.copy2(self.find_libcxx_config_site(), dest / "__config_site")
+    @property
+    def notices(self) -> List[str]:
+        base = ndk.paths.android_path('external')
+        headers_dir = os.path.join(base, 'vulkan-headers')
+        layers_dir = os.path.join(base, 'vulkan-validation-layers')
+        tools_dir = os.path.join(base, 'vulkan-tools')
+        return [
+            os.path.join(headers_dir, 'NOTICE'),
+            os.path.join(layers_dir, 'NOTICE'),
+            os.path.join(tools_dir, 'NOTICE')
+        ]
 
-    def find_libcxx_config_site(self) -> Path:
-        """Finds the __config_site file for the NDK libc++.
+    def build(self) -> None:
+        print('Constructing Vulkan validation layer source...')
+        vulkan_root_dir = ndk.paths.android_path(
+            'external/vulkan-validation-layers')
+        vulkan_headers_root_dir = ndk.paths.android_path(
+            'external/vulkan-headers')
+        vulkan_tools_root_dir = ndk.paths.android_path(
+            'external/vulkan-tools')
 
-        That header exists per-ABI in the android_libc++ directory, but they should all
-        be identical and the driver doesn't search per-ABI include directories for
-        libc++. Verify that they are actually identical and return one of them
-        arbitrarily.
-        """
-        config_sites: list[Path] = []
-        for abi in ALL_ABIS:
-            includes = self.toolchain_libcxx_path_for(abi) / "include"
-            config_sites.extend(includes.glob("**/__config_site"))
-        first = config_sites[0]
-        contents = first.read_bytes()
-        for config_site in config_sites[1:]:
-            if config_site.read_bytes() != contents:
-                raise RuntimeError(
-                    f"Expected all NDK __config_site files to be identical. {first} "
-                    f"and {config_site} have different contents."
-                )
-        return first
+        copies = [
+            {
+                'source_dir': vulkan_root_dir,
+                'dest_dir': 'vulkan/src',
+                'files': [
+                ],
+                'dirs': [
+                    'layers', 'tests', 'scripts'
+                ],
+            },
+            {
+                'source_dir': vulkan_headers_root_dir,
+                'dest_dir': 'vulkan/src',
+                'files': [
+                ],
+                'dirs': [
+                    'include', 'registry'
+                ],
+            },
+            {
+                'source_dir': vulkan_tools_root_dir,
+                'dest_dir': 'vulkan/src',
+                'files': [
+                ],
+                'dirs': [
+                    'common'
+                ],
+            }
+        ]
 
-    def create_libcxx_linker_scripts(self) -> None:
-        """Install per-target linker scripts for libc++.so and libc++.a.
+        default_ignore_patterns = shutil.ignore_patterns(
+            "*CMakeLists.txt",
+            "*test.cc",
+            "linux",
+            "windows")
 
-        Clang is going to try to use `-lc++`, not `-lc++_shared` or
-        `-lc++_static -lc++abi`. Linker scripts paper over those details.
+        base_vulkan_path = os.path.join(self.out_dir, 'vulkan')
+        vulkan_path = os.path.join(base_vulkan_path, 'src')
+        for properties in copies:
+            source_dir = properties['source_dir']
+            assert isinstance(source_dir, str)
+            assert isinstance(properties['dest_dir'], str)
+            dest_dir = os.path.join(self.out_dir, properties['dest_dir'])
+            for d in properties['dirs']:
+                src = os.path.join(source_dir, d)
+                dst = os.path.join(dest_dir, d)
+                shutil.rmtree(dst, True)
+                shutil.copytree(src, dst,
+                                ignore=default_ignore_patterns)
+            for f in properties['files']:
+                install_file(f, source_dir, dest_dir)
 
-        These are per-target for historical reasons (pre-21 needed libandroid_support,
-        arm32 needed libunwind). These could probably be reduced to a single linker
-        script now.
-        """
+        # Copy Android build components
+        print('Copying Vulkan build components...')
+        src = os.path.join(vulkan_root_dir, 'build-android')
+        dst = os.path.join(vulkan_path, 'build-android')
+        shutil.rmtree(dst, True)
+        shutil.copytree(src, dst, ignore=default_ignore_patterns)
+        print('Copying finished')
+
+        # Copy binary validation layer libraries
+        print('Copying Vulkan binary validation layers...')
+        src = ndk.paths.android_path('prebuilts/ndk/vulkan-validation-layers')
+        dst = os.path.join(vulkan_path, 'build-android/jniLibs')
+        shutil.rmtree(dst, True)
+        shutil.copytree(src, dst, ignore=default_ignore_patterns)
+        print('Copying finished')
+
+        # TODO: Verify source packaged properly
+        print('Packaging Vulkan source...')
+        src = os.path.join(self.out_dir, 'vulkan')
+        build_support.make_package('vulkan', src, self.dist_dir)
+        print('Packaging Vulkan source finished')
+
+
+class Toolchain(ndk.builds.Module):
+    """The complete toolchain.
+
+    BaseToolchain installs the core of the toolchain. This module installs the
+    STL to that toolchain.
+    """
+
+    name = 'toolchain'
+    # This is installed to the Clang location to avoid migration pain.
+    path = 'toolchains/llvm/prebuilt/{host}'
+    notice_group = ndk.builds.NoticeGroup.TOOLCHAIN
+    deps = {
+        'base-toolchain',
+        'libc++',
+        'libc++abi',
+        'platforms',
+    }
+
+    @property
+    def notices(self) -> List[str]:
+        return Libcxx().notices + Libcxxabi().notices
+
+    def build(self) -> None:
+        pass
+
+    def install(self) -> None:
         install_dir = self.get_install_path()
-        for api in ALL_API_LEVELS:
-            for abi in ndk.abis.iter_abis_for_api(api):
-                triple = ndk.abis.abi_to_triple(abi)
-                dst_dir = install_dir / "sysroot/usr/lib" / triple / str(api)
+        libcxx_dir = self.get_dep('libc++').get_install_path()
+        libcxxabi_dir = self.get_dep('libc++abi').get_install_path()
 
-                (dst_dir / "libc++.so").write_text("INPUT(-lc++_shared)")
-                (dst_dir / "libc++.a").write_text("INPUT(-lc++_static -lc++abi)")
+        libcxx_hdr_dir = os.path.join(install_dir, 'sysroot/usr/include/c++')
+        os.makedirs(libcxx_hdr_dir)
+        libcxx_inc_src = os.path.join(libcxx_dir, 'include')
+        libcxx_inc_dst = os.path.join(libcxx_hdr_dir, 'v1')
+        shutil.copytree(libcxx_inc_src, libcxx_inc_dst)
+
+        libcxxabi_inc_src = os.path.join(libcxxabi_dir, 'include')
+        copy_tree(libcxxabi_inc_src, libcxx_inc_dst)
+
+        for arch in self.arches:
+            # We need to replace libgcc with linker scripts that also use
+            # libunwind on arm32. We already get libgcc from copying binutils,
+            # but re-install it so we get the linker scripts.
+            #
+            # This needs to be done here rather than in BaseToolchain because
+            # libunwind isn't available until libc++ has been built.
+            for subarch in get_subarches(arch):
+                install_libgcc(
+                    install_dir, self.host, arch, subarch, new_layout=True)
+
+            triple = ndk.abis.arch_to_triple(arch)
+            abi, = ndk.abis.arch_to_abis(arch)
+            libcxx_lib_dir = os.path.join(libcxx_dir, 'libs', abi)
+            sysroot_dst = os.path.join(install_dir, 'sysroot/usr/lib', triple)
+
+            libs = [
+                'libc++_shared.so',
+                'libc++_static.a',
+                'libc++abi.a',
+            ]
+            if arch == 'arm':
+                libs.append('libunwind.a')
+            if abi in ndk.abis.LP32_ABIS:
+                libs.append('libandroid_support.a')
+
+            for lib in libs:
+                shutil.copy2(os.path.join(libcxx_lib_dir, lib), sysroot_dst)
+
+        platforms = self.get_dep('platforms')
+        assert isinstance(platforms, Platforms)
+        for api in platforms.get_apis():
+            if api in Platforms.skip_apis:
+                continue
+
+            for arch in platforms.get_arches(api):
+                triple = ndk.abis.arch_to_triple(arch)
+                dst_dir = os.path.join(install_dir, 'sysroot/usr/lib', triple,
+                                       str(api))
+
+                # Also install a libc++.so and libc++.a linker script per API
+                # level. We need this to be done on a per-API level basis
+                # because libandroid_support is only used on pre-21 API levels.
+                static_script = ['-lc++_static', '-lc++abi']
+                shared_script = ['-lc++_shared']
+                assert isinstance(api, int)
+                if api < 21:
+                    static_script.append('-landroid_support')
+                    shared_script.insert(0, '-landroid_support')
+
+                libcxx_so_path = os.path.join(dst_dir, 'libc++.so')
+                with open(libcxx_so_path, 'w') as script:
+                    script.write('INPUT({})'.format(' '.join(shared_script)))
+
+                libcxx_a_path = os.path.join(dst_dir, 'libc++.a')
+                with open(libcxx_a_path, 'w') as script:
+                    script.write('INPUT({})'.format(' '.join(static_script)))
 
 
 def make_format_value(value: Any) -> Any:
     if isinstance(value, list):
-        return " ".join(value)
+        return ' '.join(value)
     return value
 
 
 def var_dict_to_make(var_dict: Dict[str, Any]) -> str:
     lines = []
     for name, value in var_dict.items():
-        lines.append("{} := {}".format(name, make_format_value(value)))
+        lines.append('{} := {}'.format(name, make_format_value(value)))
     return os.linesep.join(lines)
 
 
 def cmake_format_value(value: Any) -> Any:
     if isinstance(value, list):
-        return ";".join(value)
+        return ';'.join(value)
     return value
 
 
@@ -1475,316 +2128,279 @@
     return os.linesep.join(lines)
 
 
-def abis_meta_transform(metadata: dict[str, Any]) -> dict[str, Any]:
+def abis_meta_transform(metadata: Dict) -> Dict[str, Any]:
     default_abis = []
     deprecated_abis = []
     lp32_abis = []
     lp64_abis = []
-    abi_infos = {}
     for abi, abi_data in metadata.items():
-        bitness = abi_data["bitness"]
+        bitness = abi_data['bitness']
         if bitness == 32:
             lp32_abis.append(abi)
         elif bitness == 64:
             lp64_abis.append(abi)
         else:
-            raise ValueError("{} bitness is unsupported value: {}".format(abi, bitness))
+            raise ValueError('{} bitness is unsupported value: {}'.format(
+                abi, bitness))
 
-        if abi_data["default"]:
+        if abi_data['default']:
             default_abis.append(abi)
 
-        if abi_data["deprecated"]:
+        if abi_data['deprecated']:
             deprecated_abis.append(abi)
 
-        proc = abi_data["proc"]
-        arch = abi_data["arch"]
-        triple = abi_data["triple"]
-        llvm_triple = abi_data["llvm_triple"]
-        abi_infos[f"NDK_ABI_{abi}_PROC"] = proc
-        abi_infos[f"NDK_ABI_{abi}_ARCH"] = arch
-        abi_infos[f"NDK_ABI_{abi}_TRIPLE"] = triple
-        abi_infos[f"NDK_ABI_{abi}_LLVM_TRIPLE"] = llvm_triple
-        abi_infos[f"NDK_ABI_{abi}_MIN_OS_VERSION"] = int(abi_data["min_os_version"])
-
     meta_vars = {
-        "NDK_DEFAULT_ABIS": sorted(default_abis),
-        "NDK_DEPRECATED_ABIS": sorted(deprecated_abis),
-        "NDK_KNOWN_DEVICE_ABI32S": sorted(lp32_abis),
-        "NDK_KNOWN_DEVICE_ABI64S": sorted(lp64_abis),
-        "NDK_KNOWN_DEVICE_ABIS": sorted(lp32_abis + lp64_abis),
+        'NDK_DEFAULT_ABIS': sorted(default_abis),
+        'NDK_DEPRECATED_ABIS': sorted(deprecated_abis),
+        'NDK_KNOWN_DEVICE_ABI32S': sorted(lp32_abis),
+        'NDK_KNOWN_DEVICE_ABI64S': sorted(lp64_abis),
     }
-    meta_vars.update(abi_infos)
 
     return meta_vars
 
 
-def platforms_meta_transform(metadata: dict[str, Any]) -> dict[str, Any]:
+def platforms_meta_transform(metadata: Dict) -> Dict[str, Any]:
     meta_vars = {
-        "NDK_MIN_PLATFORM_LEVEL": metadata["min"],
-        "NDK_MAX_PLATFORM_LEVEL": metadata["max"],
+        'NDK_MIN_PLATFORM_LEVEL': metadata['min'],
+        'NDK_MAX_PLATFORM_LEVEL': metadata['max'],
     }
 
-    for src, dst in metadata["aliases"].items():
-        name = "NDK_PLATFORM_ALIAS_{}".format(src)
-        value = "android-{}".format(dst)
+    for src, dst in metadata['aliases'].items():
+        name = 'NDK_PLATFORM_ALIAS_{}'.format(src)
+        value = 'android-{}'.format(dst)
         meta_vars[name] = value
     return meta_vars
 
 
-def system_libs_meta_transform(metadata: dict[str, Any]) -> dict[str, Any]:
+def system_libs_meta_transform(metadata: Dict) -> Dict[str, Any]:
     # This file also contains information about the first supported API level
     # for each library. We could use this to provide better diagnostics in
     # ndk-build, but currently do not.
-    return {"NDK_SYSTEM_LIBS": sorted(metadata.keys())}
+    return {'NDK_SYSTEM_LIBS': sorted(metadata.keys())}
 
 
-@register
 class NdkBuild(ndk.builds.PackageModule):
-    name = "ndk-build"
-    install_path = Path("build")
-    src = NDK_DIR / "build"
-    notice = NDK_DIR / "NOTICE"
+    name = 'ndk-build'
+    path = 'build'
+    src = ndk.paths.ndk_path('build')
+    notice = ndk.paths.ndk_path('NOTICE')
 
     deps = {
-        "meta",
-        "clang",
+        'meta',
     }
 
     def install(self) -> None:
         super().install()
 
         self.install_ndk_version_makefile()
-        self.generate_cmake_compiler_id()
 
-        self.generate_language_specific_metadata("abis", abis_meta_transform)
+        self.generate_language_specific_metadata('abis', abis_meta_transform)
 
-        self.generate_language_specific_metadata("platforms", platforms_meta_transform)
+        self.generate_language_specific_metadata('platforms',
+                                                 platforms_meta_transform)
 
-        self.generate_language_specific_metadata(
-            "system_libs", system_libs_meta_transform
-        )
+        self.generate_language_specific_metadata('system_libs',
+                                                 system_libs_meta_transform)
 
     def install_ndk_version_makefile(self) -> None:
         """Generates a version.mk for ndk-build."""
-        version_mk = Path(self.get_install_path()) / "core/version.mk"
-        version_mk.write_text(
-            textwrap.dedent(
-                f"""\
+        version_mk = Path(self.get_install_path()) / 'core/version.mk'
+        version_mk.write_text(textwrap.dedent(f"""\
             NDK_MAJOR := {ndk.config.major}
             NDK_MINOR := {ndk.config.hotfix}
             NDK_BETA := {ndk.config.beta}
             NDK_CANARY := {str(ndk.config.canary).lower()}
-            """
-            )
-        )
-
-    @staticmethod
-    def get_clang_version(clang: Path) -> str:
-        """Invokes Clang to determine its version string."""
-        result = subprocess.run(
-            [str(clang), "--version"], capture_output=True, encoding="utf-8", check=True
-        )
-        version_line = result.stdout.splitlines()[0]
-        # Format of the version line is:
-        # Android ($BUILD, based on $REV) clang version x.y.z ($GIT_URL $SHA)
-        match = re.search(r"clang version ([0-9.]+)\s", version_line)
-        if match is None:
-            raise RuntimeError(f"Could not find Clang version in:\n{result.stdout}")
-        return match.group(1)
-
-    def generate_cmake_compiler_id(self) -> None:
-        """Generates compiler ID information for old versions of CMake."""
-        compiler_id_file = Path(self.get_install_path()) / "cmake/compiler_id.cmake"
-        clang_prebuilts = Path(self.get_dep("clang").get_build_host_install())
-        clang = clang_prebuilts / "bin/clang"
-        clang_version = self.get_clang_version(clang)
-
-        compiler_id_file.write_text(
-            textwrap.dedent(
-                f"""\
-            # The file is automatically generated when the NDK is built.
-            set(CMAKE_ASM_COMPILER_VERSION {clang_version})
-            set(CMAKE_C_COMPILER_VERSION {clang_version})
-            set(CMAKE_CXX_COMPILER_VERSION {clang_version})
-            """
-            )
-        )
+            """))
 
     def generate_language_specific_metadata(
-        self, name: str, func: Callable[[dict[str, Any]], dict[str, Any]]
-    ) -> None:
+            self, name: str, func: Callable[[Dict], Dict[str, Any]]) -> None:
         install_path = self.get_install_path()
-        json_path = self.get_dep("meta").get_install_path() / (name + ".json")
-        with json_path.open(encoding="utf-8") as json_file:
-            meta = json.load(json_file)
+        json_path = os.path.join(
+            self.get_dep('meta').get_install_path(), name + '.json')
+        meta = json.loads(ndk.file.read_file(json_path))
         meta_vars = func(meta)
 
-        (install_path / f"core/{name}.mk").write_text(var_dict_to_make(meta_vars))
-        (install_path / f"cmake/{name}.cmake").write_text(var_dict_to_cmake(meta_vars))
+        ndk.file.write_file(
+            os.path.join(install_path, 'core/{}.mk'.format(name)),
+            var_dict_to_make(meta_vars))
+        ndk.file.write_file(
+            os.path.join(install_path, 'cmake/{}.cmake'.format(name)),
+            var_dict_to_cmake(meta_vars))
 
 
-@register
 class PythonPackages(ndk.builds.PackageModule):
-    name = "python-packages"
-    install_path = Path("python-packages")
-    src = ANDROID_DIR / "development/python-packages"
+    name = 'python-packages'
+    path = 'python-packages'
+    src = ndk.paths.android_path('development/python-packages')
 
 
-@register
 class SystemStl(ndk.builds.PackageModule):
-    name = "system-stl"
-    install_path = Path("sources/cxx-stl/system")
-    src = NDK_DIR / "sources/cxx-stl/system"
+    name = 'system-stl'
+    path = 'sources/cxx-stl/system'
+    src = ndk.paths.ndk_path('sources/cxx-stl/system')
 
 
-@register
+class LibAndroidSupport(ndk.builds.PackageModule):
+    name = 'libandroid_support'
+    path = 'sources/android/support'
+    src = ndk.paths.ndk_path('sources/android/support')
+
+
+class Libcxxabi(ndk.builds.PackageModule):
+    name = 'libc++abi'
+    path = 'sources/cxx-stl/llvm-libc++abi'
+    src = ndk.paths.android_path('external/libcxxabi')
+
+
 class SimplePerf(ndk.builds.Module):
-    name = "simpleperf"
-    install_path = Path("simpleperf")
-    notice = ANDROID_DIR / "prebuilts/simpleperf/NOTICE"
+    name = 'simpleperf'
+    path = 'simpleperf'
+    notice = ndk.paths.android_path('prebuilts/simpleperf/NOTICE')
 
     def build(self) -> None:
-        pass
-
-    def install(self) -> None:
-        print("Installing simpleperf...")
-        install_dir = self.get_install_path()
-        if install_dir.exists():
+        print('Building simpleperf...')
+        install_dir = os.path.join(self.out_dir, 'simpleperf')
+        if os.path.exists(install_dir):
             shutil.rmtree(install_dir)
-        install_dir.mkdir(parents=True)
+        os.makedirs(install_dir)
 
-        simpleperf_path = ndk.paths.android_path("prebuilts/simpleperf")
-        dirs = [
-            Path("app_api"),
-            Path("bin/android"),
-            Path("doc"),
-            Path("inferno"),
-            Path("proto"),
-            Path("purgatorio"),
-        ]
-        host_bin_dir = "windows" if self.host.is_windows else self.host.value
-        dirs.append(Path("bin") / host_bin_dir)
+        simpleperf_path = ndk.paths.android_path('prebuilts/simpleperf')
+        dirs = ['doc', 'inferno', 'bin/android', 'app_api']
+        host_bin_dir = 'windows' if self.host.is_windows else self.host.value
+        dirs.append(os.path.join('bin/', host_bin_dir))
         for d in dirs:
-            shutil.copytree(simpleperf_path / d, install_dir / d)
+            shutil.copytree(os.path.join(simpleperf_path, d),
+                            os.path.join(install_dir, d))
 
         for item in os.listdir(simpleperf_path):
             should_copy = False
-            if item.endswith(".py") and item != "update.py":
+            if item.endswith('.py') and item not in ['update.py', 'test.py']:
                 should_copy = True
-            elif item == "report_html.js":
+            elif item == 'report_html.js':
                 should_copy = True
-            elif item == "inferno.sh" and not self.host.is_windows:
+            elif item == 'inferno.sh' and not self.host.is_windows:
                 should_copy = True
-            elif item == "inferno.bat" and self.host.is_windows:
+            elif item == 'inferno.bat' and self.host.is_windows:
                 should_copy = True
             if should_copy:
-                shutil.copy2(simpleperf_path / item, install_dir)
+                shutil.copy2(os.path.join(simpleperf_path, item), install_dir)
 
-        shutil.copy2(simpleperf_path / "ChangeLog", install_dir)
+        shutil.copy2(os.path.join(simpleperf_path, 'ChangeLog'), install_dir)
+        build_support.make_package('simpleperf', install_dir, self.dist_dir)
 
 
-@register
+class RenderscriptLibs(ndk.builds.PackageModule):
+    name = 'renderscript-libs'
+    path = 'sources/android/renderscript'
+    src = ndk.paths.ndk_path('sources/android/renderscript')
+
+
+class RenderscriptToolchain(ndk.builds.InvokeBuildModule):
+    name = 'renderscript-toolchain'
+    path = 'toolchains/renderscript/prebuilt/{host}'
+    script = 'build-renderscript.py'
+
+    @property
+    def notices(self) -> List[str]:
+        base = ndk.paths.android_path('prebuilts/renderscript/host')
+        return [
+            os.path.join(base, 'darwin-x86/current/NOTICE'),
+            os.path.join(base, 'linux-x86/current/NOTICE'),
+            os.path.join(base, 'windows-x86/current/NOTICE'),
+        ]
+
+
 class Changelog(ndk.builds.FileModule):
-    name = "changelog"
-    install_path = Path("CHANGELOG.md")
-    src = NDK_DIR / f"docs/changelogs/Changelog-r{ndk.config.major}.md"
+    name = 'changelog'
+    path = 'CHANGELOG.md'
+    src = ndk.paths.ndk_path('docs/changelogs/Changelog-r{}.md'.format(
+        ndk.config.major))
     no_notice = True
 
 
-@register
-class NdkGdb(ndk.builds.PythonApplication):
-    name = "ndk-gdb"
-    install_path = Path("prebuilt/{host}/bin/ndkgdb.pyz")
-    notice = NDK_DIR / "NOTICE"
-    package = NDK_DIR / "ndkgdb.py"
-    main = "ndkgdb:main"
-    py_pkg_deps = [
-        ANDROID_DIR / "development/python-packages/adb/adb",
-        ANDROID_DIR / "development/python-packages/gdbrunner/gdbrunner",
-    ]
-    deps = {"ndk-gdb-shortcut", "ndk-lldb-shortcut"}
+class NdkGdb(ndk.builds.MultiFileModule):
+    name = 'ndk-gdb'
+    path = 'prebuilt/{host}/bin'
+    notice = ndk.paths.ndk_path('NOTICE')
+
+    @property
+    def files(self) -> Iterable[str]:
+        files = [
+            ndk.paths.ndk_path('ndk-gdb'),
+            ndk.paths.ndk_path('ndk-gdb.py'),
+        ]
+
+        if self.host.is_windows:
+            files.append(ndk.paths.ndk_path('ndk-gdb.cmd'))
+
+        return files
 
 
-@register
 class NdkGdbShortcut(ndk.builds.ScriptShortcutModule):
-    name = "ndk-gdb-shortcut"
-    install_path = Path("ndk-gdb")
-    script = Path("prebuilt/{host}/bin/ndk-gdb")
-    windows_ext = ".cmd"
+    name = 'ndk-gdb-shortcut'
+    path = 'ndk-gdb'
+    script = 'prebuilt/{host}/bin/ndk-gdb'
+    windows_ext = '.cmd'
 
 
-@register
-class NdkLldbShortcut(ndk.builds.ScriptShortcutModule):
-    name = "ndk-lldb-shortcut"
-    install_path = Path("ndk-lldb")
-    script = Path("prebuilt/{host}/bin/ndk-gdb")
-    windows_ext = ".cmd"
+class NdkStack(ndk.builds.MultiFileModule):
+    name = 'ndk-stack'
+    path = 'prebuilt/{host}/bin'
+    notice = ndk.paths.ndk_path('NOTICE')
+
+    @property
+    def files(self) -> Iterable[str]:
+        files = [
+            ndk.paths.ndk_path('ndk-stack'),
+            ndk.paths.ndk_path('ndk-stack.py'),
+        ]
+
+        if self.host.is_windows:
+            files.append(ndk.paths.ndk_path('ndk-stack.cmd'))
+
+        return files
 
 
-@register
-class NdkStack(ndk.builds.PythonApplication):
-    name = "ndk-stack"
-    install_path = Path("prebuilt/{host}/bin/ndkstack.pyz")
-    notice = NDK_DIR / "NOTICE"
-    package = NDK_DIR / "ndkstack.py"
-    main = "ndkstack:main"
-    deps = {
-        # PythonApplication depends on build/tools/ndk_bin_common.sh.
-        "ndk-build",
-        "ndk-stack-shortcut",
-        # PythonApplication depends on Python, which is bundled with Clang.
-        "toolchain",
-    }
-
-
-@register
 class NdkStackShortcut(ndk.builds.ScriptShortcutModule):
-    name = "ndk-stack-shortcut"
-    install_path = Path("ndk-stack")
-    script = Path("prebuilt/{host}/bin/ndk-stack")
-    windows_ext = ".cmd"
+    name = 'ndk-stack-shortcut'
+    path = 'ndk-stack'
+    script = 'prebuilt/{host}/bin/ndk-stack'
+    windows_ext = '.cmd'
 
 
-@register
 class NdkWhichShortcut(ndk.builds.ScriptShortcutModule):
-    name = "ndk-which-shortcut"
-    install_path = Path("ndk-which")
-    script = Path("prebuilt/{host}/bin/ndk-which")
-    windows_ext = ""  # There isn't really a Windows ndk-which.
+    name = 'ndk-which-shortcut'
+    path = 'ndk-which'
+    script = 'prebuilt/{host}/bin/ndk-which'
+    windows_ext = ''  # There isn't really a Windows ndk-which.
 
 
-@register
 class NdkBuildShortcut(ndk.builds.ScriptShortcutModule):
-    name = "ndk-build-shortcut"
-    install_path = Path("ndk-build")
-    script = Path("build/ndk-build")
-    windows_ext = ".cmd"
-    disallow_windows_install_path_with_spaces = True
+    name = 'ndk-build-shortcut'
+    path = 'ndk-build'
+    script = 'build/ndk-build'
+    windows_ext = '.cmd'
 
 
-@register
 class Readme(ndk.builds.FileModule):
-    name = "readme"
-    install_path = Path("README.md")
-    src = NDK_DIR / "UserReadme.md"
+    name = 'readme'
+    path = 'README.md'
+    src = ndk.paths.ndk_path('UserReadme.md')
 
 
-CANARY_TEXT = textwrap.dedent(
-    """\
+CANARY_TEXT = textwrap.dedent("""\
     This is a canary build of the Android NDK. It's updated almost every day.
 
     Canary builds are designed for early adopters and can be prone to breakage.
     Sometimes they can break completely. To aid development and testing, this
     distribution can be installed side-by-side with your existing, stable NDK
     release.
-    """
-)
+    """)
 
 
-@register
 class CanaryReadme(ndk.builds.Module):
-    name = "canary-readme"
-    install_path = Path("README.canary")
+    name = 'canary-readme'
+    path = 'README.canary'
     no_notice = True
 
     def build(self) -> None:
@@ -1792,92 +2408,52 @@
 
     def install(self) -> None:
         if ndk.config.canary:
-            self.get_install_path().write_text(CANARY_TEXT)
+            canary_path = self.get_install_path()
+            with open(canary_path, 'w') as canary_file:
+                canary_file.write(CANARY_TEXT)
 
 
-@register
 class Meta(ndk.builds.PackageModule):
-    name = "meta"
-    install_path = Path("meta")
-    src = NDK_DIR / "meta"
+    name = 'meta'
+    path = 'meta'
+    src = ndk.paths.ndk_path('meta')
     no_notice = True
 
     deps = {
-        "toolchain",
+        'base-toolchain',
     }
 
-    @staticmethod
-    def find_max_api_level_in_prebuilts() -> int:
-        max_api = 0
-        for path in PREBUILT_SYSROOT.glob("usr/lib/*/*"):
-            if not path.is_dir():
-                continue
-
-            try:
-                api = int(path.name)
-                max_api = max(max_api, api)
-            except ValueError as ex:
-                # Codenamed release like android-O, android-O-MR1, etc.
-                # Codenamed APIs are not supported, since having
-                # non-integer API directories breaks all kinds of tools, we
-                # rename them when we check them in.
-                raise ValueError(
-                    f"Codenamed APIs are not allowed: {path}\n"
-                    "Use the update_platform.py tool from the "
-                    "platform/prebuilts/ndk dev branch to remove or rename it."
-                ) from ex
-
-        return max_api
-
-    def validate(self) -> None:
-        super().validate()
-
-        max_sysroot_api = self.find_max_api_level_in_prebuilts()
-        if max_sysroot_api != MAX_API_LEVEL:
-            raise RuntimeError(
-                f"API {max_sysroot_api} is the newest API level in {PREBUILT_SYSROOT} "
-                f"sysroot but does not match meta/platforms.json max of {MAX_API_LEVEL}"
-            )
-        if max_sysroot_api not in API_LEVEL_ALIASES.values():
-            raise RuntimeError(
-                f"API {max_sysroot_api} is the newest API level in {PREBUILT_SYSROOT} "
-                "but has no alias in meta/platforms.json."
-            )
-
     def install(self) -> None:
         super().install()
         self.create_system_libs_meta()
-        self.add_min_api_data_to_abis()
 
     def create_system_libs_meta(self) -> None:
         # Build system_libs.json based on what we find in the toolchain. We
         # only need to scan a single 32-bit architecture since these libraries
         # do not vary in availability across architectures.
-        sysroot_base = (
-            self.get_dep("toolchain").get_install_path()
-            / "sysroot/usr/lib/arm-linux-androideabi"
-        )
+        sysroot_base = os.path.join(
+            self.get_dep('base-toolchain').get_install_path(),
+            'sysroot/usr/lib/arm-linux-androideabi')
 
         system_libs: Dict[str, str] = {}
         for api_name in sorted(os.listdir(sysroot_base)):
-            path = sysroot_base / api_name
+            path = os.path.join(sysroot_base, api_name)
 
             # There are also non-versioned libraries in this directory.
-            if not path.is_dir():
+            if not os.path.isdir(path):
                 continue
 
             for lib in os.listdir(path):
                 # Don't include CRT objects in the list.
-                if not lib.endswith(".so"):
+                if not lib.endswith('.so'):
                     continue
 
-                if not lib.startswith("lib"):
+                if not lib.startswith('lib'):
                     raise RuntimeError(
-                        "Found unexpected file in sysroot: {}".format(lib)
-                    )
+                        'Found unexpected file in sysroot: {}'.format(lib))
 
                 # libc++.so is a linker script, not a system library.
-                if lib == "libc++.so":
+                if lib == 'libc++.so':
                     continue
 
                 # We're processing each version directory in sorted order, so
@@ -1890,34 +2466,21 @@
 
         system_libs = collections.OrderedDict(sorted(system_libs.items()))
 
-        json_path = self.get_install_path() / "system_libs.json"
-        with json_path.open("w", encoding="utf-8") as json_file:
-            json.dump(system_libs, json_file, indent=2, separators=(",", ": "))
-
-    def add_min_api_data_to_abis(self) -> None:
-        json_path = self.get_install_path() / "abis.json"
-        with json_path.open(encoding="utf-8") as json_file:
-            data = json.load(json_file)
-
-        for abi_name, abi_data in data.items():
-            abi_data["min_os_version"] = ndk.abis.min_api_for_abi(Abi(abi_name))
-
-        with json_path.open("w", encoding="utf-8") as json_file:
-            json.dump(data, json_file, indent=2, separators=(",", ": "))
+        json_path = os.path.join(self.get_install_path(), 'system_libs.json')
+        with open(json_path, 'w') as json_file:
+            json.dump(system_libs, json_file, indent=2, separators=(',', ': '))
 
 
-@register
 class WrapSh(ndk.builds.PackageModule):
-    name = "wrap.sh"
-    install_path = Path("wrap.sh")
-    src = NDK_DIR / "wrap.sh"
+    name = 'wrap.sh'
+    path = 'wrap.sh'
+    src = ndk.paths.ndk_path('wrap.sh')
     no_notice = True
 
 
-@register
 class SourceProperties(ndk.builds.Module):
-    name = "source.properties"
-    install_path = Path("source.properties")
+    name = 'source.properties'
+    path = 'source.properties'
     no_notice = True
 
     def build(self) -> None:
@@ -1925,44 +2488,39 @@
 
     def install(self) -> None:
         path = self.get_install_path()
-        assert self.context is not None
-        version = get_version_string(self.context.build_number)
-        if ndk.config.beta > 0:
-            version += "-beta{}".format(ndk.config.beta)
-
-        # This file is read by the release tooling to populate the SDK manifest. Some of
-        # these properties (Pkg.Desc and Pkg.Revision) will populate fields that the SDK
-        # manager UI in Android Studio will use as the display name and categorization,
-        # so the formats of those should not change.
-        #
-        # Pkg.BaseRevision determines the install location within the SDK
-        # directory when installed by the SDK manager, and the name that shows
-        # up in the "Name" column (the format of which has an impact on how
-        # Studio groups packages, as there is no explicit grouping). This must
-        # be $MAJOR.$HOTFIX.$BUILD with no beta, RC, or canary information, or
-        # else the SDK manager will install it to a location other than what AGP
-        # expects, and the SDK manager will not group it correctly in the
-        # details panel.
-        #
-        # The rest is up to us. We can add new fields that can be used in the release
-        # configs. Pkg.ReleaseName, for example, is used to populate that portion of the
-        # name of the zip file produced by the release.
-        version_number = (
-            f"{ndk.config.major}.{ndk.config.hotfix}.{self.context.build_number}"
-        )
-        path.write_text(
-            textwrap.dedent(
-                f"""\
-                Pkg.Desc = Android NDK
-                Pkg.Revision = {version}
-                Pkg.BaseRevision = {version_number}
-                Pkg.ReleaseName = {ndk.config.release}
-                """
-            )
-        )
+        with open(path, 'w') as source_properties:
+            assert self.context is not None
+            build = self.context.build_number
+            if build == 'dev':
+                build = '0'
+            version = '{}.{}.{}'.format(
+                ndk.config.major, ndk.config.hotfix, build)
+            if ndk.config.beta > 0:
+                version += '-beta{}'.format(ndk.config.beta)
+            source_properties.writelines([
+                'Pkg.Desc = Android NDK\n',
+                'Pkg.Revision = {}\n'.format(version)
+            ])
 
 
-def create_notice_file(path: Path, for_group: ndk.builds.NoticeGroup) -> None:
+class AdbPy(ndk.builds.PythonPackage):
+    name = 'adb.py'
+    path = ndk.paths.android_path('development/python-packages/adb/setup.py')
+    notice = ndk.paths.android_path('development/python-packages/NOTICE')
+
+
+class Lit(ndk.builds.PythonPackage):
+    name = 'lit'
+    path = ndk.paths.android_path('external/llvm/utils/lit/setup.py')
+    notice = ndk.paths.android_path('external/llvm/NOTICE')
+
+
+class NdkPy(ndk.builds.PythonPackage):
+    name = 'ndk.py'
+    path = ndk.paths.ndk_path('setup.py')
+
+
+def create_notice_file(path: str, for_group: ndk.builds.NoticeGroup) -> None:
     # Using sets here so we can perform some amount of duplicate reduction. In
     # a lot of cases there will be minor differences that cause lots of
     # "duplicates", but might as well catch what we can.
@@ -1974,48 +2532,30 @@
 
     licenses = set()
     for notice_path in notice_files:
-        with open(notice_path, encoding="utf-8") as notice_file:
+        with open(notice_path, encoding='utf-8') as notice_file:
             licenses.add(notice_file.read())
 
-    with path.open("w", encoding="utf-8") as output_file:
+    with open(path, 'w', encoding='utf-8') as output_file:
         # Sorting the contents here to try to make things deterministic.
         output_file.write(os.linesep.join(sorted(list(licenses))))
 
 
-def launch_build(
-    worker: ndk.workqueue.Worker,
-    module: ndk.builds.Module,
-    log_dir: Path,
-    debuggable: bool,
-) -> Tuple[bool, ndk.builds.Module]:
-    result = do_build(worker, module, log_dir, debuggable)
+def launch_build(worker: ndk.workqueue.Worker, module: ndk.builds.Module,
+                 log_dir: str) -> Tuple[bool, ndk.builds.Module]:
+    result = do_build(worker, module, log_dir)
     if not result:
         return result, module
     do_install(worker, module)
     return True, module
 
 
-@contextlib.contextmanager
-def file_logged_context(path: Path) -> Iterator[None]:
-    with path.open("w") as log_file:
+def do_build(worker: ndk.workqueue.Worker, module: ndk.builds.Module,
+             log_dir: str) -> bool:
+    with open(module.log_path(log_dir), 'w') as log_file:
         os.dup2(log_file.fileno(), sys.stdout.fileno())
         os.dup2(log_file.fileno(), sys.stderr.fileno())
-        yield
-
-
-def do_build(
-    worker: ndk.workqueue.Worker,
-    module: ndk.builds.Module,
-    log_dir: Path,
-    debuggable: bool,
-) -> bool:
-    if debuggable:
-        cm: ContextManager[None] = contextlib.nullcontext()
-    else:
-        cm = file_logged_context(module.log_path(log_dir))
-    with cm:
         try:
-            worker.status = f"Building {module}..."
+            worker.status = 'Building {}...'.format(module)
             module.build()
             return True
         except Exception:  # pylint: disable=broad-except
@@ -2023,17 +2563,26 @@
             return False
 
 
-def do_install(worker: ndk.workqueue.Worker, module: ndk.builds.Module) -> None:
-    worker.status = "Installing {}...".format(module)
+def do_install(worker: ndk.workqueue.Worker,
+               module: ndk.builds.Module) -> None:
+    worker.status = 'Installing {}...'.format(module)
     module.install()
 
 
+def split_module_by_arch(module: ndk.builds.Module, arches: List[ndk.abis.Arch]
+                         ) -> Iterator[ndk.builds.Module]:
+    if module.split_build_by_arch:
+        for arch in arches:
+            build_module = copy.deepcopy(module)
+            build_module.build_arch = arch
+            yield build_module
+    else:
+        yield module
+
+
 def _get_transitive_module_deps(
-    module: ndk.builds.Module,
-    deps: Set[ndk.builds.Module],
-    unknown_deps: Set[str],
-    seen: Set[ndk.builds.Module],
-) -> None:
+        module: ndk.builds.Module, deps: Set[ndk.builds.Module],
+        unknown_deps: Set[str], seen: Set[ndk.builds.Module]) -> None:
     seen.add(module)
 
     for name in module.deps:
@@ -2053,8 +2602,7 @@
 
 
 def get_transitive_module_deps(
-    module: ndk.builds.Module,
-) -> Tuple[Set[ndk.builds.Module], Set[str]]:
+        module: ndk.builds.Module) -> Tuple[Set[ndk.builds.Module], Set[str]]:
     seen: Set[ndk.builds.Module] = set()
     deps: Set[ndk.builds.Module] = set()
     unknown_deps: Set[str] = set()
@@ -2063,7 +2611,7 @@
 
 
 def get_modules_to_build(
-    module_names: Iterable[str],
+        module_names: Iterable[str], arches: List[ndk.abis.Arch]
 ) -> Tuple[List[ndk.builds.Module], Set[ndk.builds.Module]]:
     """Returns a list of modules to be built given a list of module names.
 
@@ -2099,176 +2647,159 @@
         unknown_modules.update(unknown_deps)
 
     if unknown_modules:
-        sys.exit("Unknown modules: {}".format(", ".join(sorted(list(unknown_modules)))))
+        sys.exit('Unknown modules: {}'.format(
+            ', '.join(sorted(list(unknown_modules)))))
 
     build_modules = []
     for module in modules:
-        build_modules.append(module)
+        for build_module in split_module_by_arch(module, arches):
+            build_modules.append(build_module)
 
     return sorted(list(build_modules), key=str), deps_only
 
 
-ALL_MODULES = [t() for t in ALL_MODULE_TYPES]
+ALL_MODULES = [
+    AdbPy(),
+    BaseToolchain(),
+    Binutils(),
+    CanaryReadme(),
+    Changelog(),
+    Clang(),
+    CpuFeatures(),
+    Gdb(),
+    GdbServer(),
+    Gtest(),
+    HostTools(),
+    LibAndroidSupport(),
+    LibShaderc(),
+    Libcxx(),
+    Libcxxabi(),
+    Lit(),
+    Make(),
+    Meta(),
+    NativeAppGlue(),
+    NdkBuild(),
+    NdkBuildShortcut(),
+    NdkGdb(),
+    NdkGdbShortcut(),
+    NdkHelper(),
+    NdkPy(),
+    NdkStack(),
+    NdkStackShortcut(),
+    NdkWhich(),
+    NdkWhichShortcut(),
+    Platforms(),
+    PythonPackages(),
+    Readme(),
+    RenderscriptLibs(),
+    RenderscriptToolchain(),
+    ShaderTools(),
+    SimplePerf(),
+    SourceProperties(),
+    Sysroot(),
+    SystemStl(),
+    Toolchain(),
+    Vulkan(),
+    WrapSh(),
+    Yasm(),
+]
+
+
 NAMES_TO_MODULES = {m.name: m for m in ALL_MODULES}
 
 
-def iter_python_app_modules() -> Iterator[ndk.builds.PythonApplication]:
-    """Returns an Iterator over all python applications."""
-    for module in ALL_MODULES:
-        if isinstance(module, ndk.builds.PythonApplication):
-            yield module
-
-
 def get_all_module_names() -> List[str]:
     return [m.name for m in ALL_MODULES if m.enabled]
 
 
-def build_number_arg(value: str) -> int:
-    if value.startswith("P"):
+def build_number_arg(value: str) -> str:
+    if value.startswith('P'):
         # Treehugger build. Treat as a local development build.
-        return 0
-    return int(value)
+        return '0'
+    return value
 
 
 def parse_args() -> Tuple[argparse.Namespace, List[str]]:
-    parser = argparse.ArgumentParser(description=inspect.getdoc(sys.modules[__name__]))
+    parser = argparse.ArgumentParser(
+        description=inspect.getdoc(sys.modules[__name__]))
 
     parser.add_argument(
-        "-v",
-        "--verbose",
-        action="count",
-        dest="verbosity",
-        default=0,
-        help="Increase logging verbosity.",
-    )
+        '--arch',
+        choices=('arm', 'arm64', 'x86', 'x86_64'),
+        help='Build for the given architecture. Build all by default.')
+    parser.add_argument(
+        '-j', '--jobs', type=int, default=multiprocessing.cpu_count(),
+        help=('Number of parallel builds to run. Note that this will not '
+              'affect the -j used for make; this just parallelizes '
+              'checkbuild.py. Defaults to the number of CPUs available.'))
 
     parser.add_argument(
-        "-j",
-        "--jobs",
-        type=int,
-        default=multiprocessing.cpu_count(),
-        help=(
-            "Number of parallel builds to run. Note that this will not "
-            "affect the -j used for make; this just parallelizes "
-            "checkbuild.py. Defaults to the number of CPUs available. "
-            "Disabled when --debugabble is used."
-        ),
-    )
-
-    parser.add_argument(
-        "--debuggable",
-        action="store_true",
-        help=(
-            "Prints build output to the console and disables threading to "
-            "allow debugging with breakpoint()"
-        ),
-    )
-
-    parser.add_argument(
-        "--skip-deps",
-        action="store_true",
-        help=(
-            "Assume that dependencies have been built and only build "
-            "explicitly named modules."
-        ),
-    )
+        '--skip-deps', action='store_true',
+        help=('Assume that dependencies have been built and only build '
+              'explicitly named modules.'))
 
     package_group = parser.add_mutually_exclusive_group()
     package_group.add_argument(
-        "--package",
-        action="store_true",
-        dest="package",
-        help="Package the NDK when done building.",
-    )
+        '--package', action='store_true', dest='package', default=True,
+        help='Package the NDK when done building (default).')
     package_group.add_argument(
-        "--no-package",
-        action="store_false",
-        dest="package",
-        help="Do not package the NDK when done building (default).",
-    )
+        '--no-package', action='store_false', dest='package',
+        help='Do not package the NDK when done building.')
+    package_group.add_argument(
+        '--force-package', action='store_true', dest='force_package',
+        help='Force a package even if only building a subset of modules.')
 
     test_group = parser.add_mutually_exclusive_group()
     test_group.add_argument(
-        "--build-tests",
-        action="store_true",
-        dest="build_tests",
-        default=True,
-        help="Build tests when finished. Not supported when targeting Windows.",
-    )
+        '--build-tests', action='store_true', dest='build_tests', default=True,
+        help=textwrap.dedent("""\
+        Build tests when finished. --package is required. Not supported
+        when targeting Windows.
+        """))
     test_group.add_argument(
-        "--no-build-tests",
-        action="store_false",
-        dest="build_tests",
-        help="Skip building tests after building the NDK.",
-    )
-
-    package_test_group = parser.add_mutually_exclusive_group()
-    package_test_group.add_argument(
-        "--package-tests",
-        action="store_true",
-        dest="package_tests",
-        default=None,
-        help="Package tests as build artifacts. Requires --build-tests.",
-    )
-    package_test_group.add_argument(
-        "--no-package-tests",
-        action="store_false",
-        dest="package_tests",
-        default=None,
-        help="Don't package tests after building them.",
-    )
+        '--no-build-tests', action='store_false', dest='build_tests',
+        help='Skip building tests after building the NDK.')
 
     parser.add_argument(
-        "--build-number",
-        default="0",
-        type=build_number_arg,
-        help="Build number for use in version files.",
-    )
-    parser.add_argument("--release", help="Ignored. Temporarily compatibility.")
+        '--build-number', default='0', type=build_number_arg,
+        help='Build number for use in version files.')
+    parser.add_argument(
+        '--release', help='Ignored. Temporary compatibility.')
 
     parser.add_argument(
-        "--system",
-        choices=Host,
-        type=Host,
-        default=Host.current(),
-        help="Build for the given OS.",
-    )
+        '--system',
+        choices=ndk.hosts.Host,
+        type=ndk.hosts.Host,
+        default=ndk.hosts.get_default_host(),
+        help='Build for the given OS.')
 
     module_group = parser.add_mutually_exclusive_group()
 
     module_group.add_argument(
-        "--module",
-        dest="modules",
-        action="append",
-        default=[],
-        choices=get_all_module_names(),
-        help="NDK modules to build.",
-    )
+        '--module', dest='modules', action='append', default=[],
+        choices=get_all_module_names(), help='NDK modules to build.')
 
     return parser.parse_known_args()
 
 
-def log_build_failure(log_path: Path, dist_dir: Path) -> None:
-    contents = log_path.read_text()
-    print(contents)
+def log_build_failure(log_path: str, dist_dir: str) -> None:
+    with open(log_path, 'r') as log_file:
+        contents = log_file.read()
+        print(contents)
 
-    # The build server has a build_error.log file that is supposed to be
-    # the short log of the failure that stopped the build. Append our
-    # failing log to that.
-    build_error_log = dist_dir / "logs/build_error.log"
-    with build_error_log.open("a", encoding="utf-8") as error_log:
-        error_log.write("\n")
-        error_log.write(contents)
+        # The build server has a build_error.log file that is supposed to be
+        # the short log of the failure that stopped the build. Append our
+        # failing log to that.
+        build_error_log = os.path.join(dist_dir, 'logs/build_error.log')
+        with open(build_error_log, 'a') as error_log:
+            error_log.write('\n')
+            error_log.write(contents)
 
 
-def launch_buildable(
-    deps: ndk.deps.DependencyManager,
-    workqueue: ndk.workqueue.AnyWorkQueue,
-    log_dir: Path,
-    debuggable: bool,
-    skip_deps: bool,
-    skip_modules: Set[ndk.builds.Module],
-) -> None:
+def launch_buildable(deps: ndk.deps.DependencyManager,
+                     workqueue: ndk.workqueue.AnyWorkQueue, log_dir: str,
+                     skip_deps: bool,
+                     skip_modules: Set[ndk.builds.Module]) -> None:
     # If args.skip_deps is true, we could get into a case where we just
     # dequeued the only module that was still building and the only
     # items in get_buildable() are modules that will be skipped.
@@ -2283,235 +2814,168 @@
             if skip_deps and module in skip_modules:
                 deps.complete(module)
                 continue
-            workqueue.add_task(launch_build, module, log_dir, debuggable)
+            workqueue.add_task(launch_build, module, log_dir)
 
 
-@contextlib.contextmanager
-def build_ui_context(debuggable: bool) -> Iterator[None]:
-    if debuggable:
-        yield
-    else:
-        console = ndk.ansi.get_console()
-        with ndk.ansi.disable_terminal_echo(sys.stdin):
-            with console.cursor_hide_context():
-                yield
-
-
-def wait_for_build(
-    deps: ndk.deps.DependencyManager,
-    workqueue: ndk.workqueue.AnyWorkQueue,
-    dist_dir: Path,
-    log_dir: Path,
-    debuggable: bool,
-    skip_deps: bool,
-    skip_modules: Set[ndk.builds.Module],
-) -> None:
+def wait_for_build(deps: ndk.deps.DependencyManager,
+                   workqueue: ndk.workqueue.AnyWorkQueue, dist_dir: str,
+                   log_dir: str, skip_deps: bool,
+                   skip_modules: Set[ndk.builds.Module]) -> None:
     console = ndk.ansi.get_console()
     ui = ndk.ui.get_build_progress_ui(console, workqueue)
-    with build_ui_context(debuggable):
-        while not workqueue.finished():
-            result, module = workqueue.get_result()
-            if not result:
-                ui.clear()
-                print("Build failed: {}".format(module))
-                log_build_failure(module.log_path(log_dir), dist_dir)
-                sys.exit(1)
-            elif not console.smart_console:
-                ui.clear()
-                print("Build succeeded: {}".format(module))
+    with ndk.ansi.disable_terminal_echo(sys.stdin):
+        with console.cursor_hide_context():
+            while not workqueue.finished():
+                result, module = workqueue.get_result()
+                if not result:
+                    ui.clear()
+                    print('Build failed: {}'.format(module))
+                    log_build_failure(
+                        module.log_path(log_dir), dist_dir)
+                    sys.exit(1)
+                elif not console.smart_console:
+                    ui.clear()
+                    print('Build succeeded: {}'.format(module))
 
-            deps.complete(module)
-            launch_buildable(
-                deps, workqueue, log_dir, debuggable, skip_deps, skip_modules
-            )
+                deps.complete(module)
+                launch_buildable(deps, workqueue, log_dir, skip_deps,
+                                 skip_modules)
 
-            ui.draw()
-        ui.clear()
-        print("Build finished")
+                ui.draw()
+            ui.clear()
+            print('Build finished')
 
 
-def check_ndk_symlink(ndk_dir: Path, src: Path, target: Path) -> None:
-    """Check that the symlink's target is relative, exists, and points within
-    the NDK installation.
-    """
-    if target.is_absolute():
-        raise RuntimeError(f"Symlink {src} points to absolute path {target}")
-    ndk_dir = ndk_dir.resolve()
-    cur = src.parent.resolve()
-    for part in target.parts:
-        # (cur / part) might itself be a symlink. Its validity is checked from
-        # the top-level scan, so it doesn't need to be checked here.
-        cur = (cur / part).resolve()
-        if not cur.exists():
-            raise RuntimeError(f"Symlink {src} targets non-existent {cur}")
-        if not cur.is_relative_to(ndk_dir):
-            raise RuntimeError(f"Symlink {src} targets {cur} outside NDK {ndk_dir}")
+def build_ndk(modules: List[ndk.builds.Module],
+              deps_only: Set[ndk.builds.Module], out_dir: str, dist_dir: str,
+              args: argparse.Namespace) -> str:
+    arches = list(ndk.abis.ALL_ARCHITECTURES)
+    if args.arch is not None:
+        arches = [args.arch]
 
-
-def check_ndk_symlinks(ndk_dir: Path, host: Host) -> None:
-    for path in ndk.paths.walk(ndk_dir):
-        if not path.is_symlink():
-            continue
-        if host == Host.Windows64:
-            # Symlinks aren't supported well enough on Windows. (e.g. They
-            # require Developer Mode and/or special permissions. Cygwin
-            # tools might create symlinks that non-Cygwin programs don't
-            # recognize.)
-            raise RuntimeError(f"Symlink {path} unexpected in Windows NDK")
-        check_ndk_symlink(ndk_dir, path, path.readlink())
-
-
-def build_ndk(
-    modules: List[ndk.builds.Module],
-    deps_only: Set[ndk.builds.Module],
-    out_dir: Path,
-    dist_dir: Path,
-    args: argparse.Namespace,
-) -> Path:
     build_context = ndk.builds.BuildContext(
-        out_dir, dist_dir, ALL_MODULES, args.system, args.build_number
-    )
+        out_dir, dist_dir, ALL_MODULES, args.system, arches, args.build_number)
 
     for module in modules:
         module.context = build_context
 
-    log_dir = dist_dir / "logs"
-    log_dir.mkdir(parents=True, exist_ok=True)
+    log_dir = os.path.join(dist_dir, 'logs')
+    if not os.path.exists(log_dir):
+        os.makedirs(log_dir)
 
     ndk_dir = ndk.paths.get_install_path(out_dir, args.system)
-    ndk_dir.mkdir(parents=True, exist_ok=True)
+    if not os.path.exists(ndk_dir):
+        os.makedirs(ndk_dir)
 
     deps = ndk.deps.DependencyManager(modules)
-    if args.debuggable:
-        workqueue: ndk.workqueue.AnyWorkQueue = ndk.workqueue.BasicWorkQueue()
-    else:
-        workqueue = ndk.workqueue.WorkQueue(args.jobs)
+    workqueue = ndk.workqueue.WorkQueue(args.jobs)
     try:
-        launch_buildable(
-            deps, workqueue, log_dir, args.debuggable, args.skip_deps, deps_only
-        )
+        launch_buildable(deps, workqueue, log_dir, args.skip_deps, deps_only)
         wait_for_build(
-            deps,
-            workqueue,
-            dist_dir,
-            log_dir,
-            args.debuggable,
-            args.skip_deps,
-            deps_only,
-        )
+            deps, workqueue, dist_dir, log_dir, args.skip_deps, deps_only)
 
         if deps.get_buildable():
             raise RuntimeError(
-                "Builder stopped early. Modules are still "
-                "buildable: {}".format(", ".join(str(deps.get_buildable())))
-            )
+                'Builder stopped early. Modules are still '
+                'buildable: {}'.format(', '.join(str(deps.get_buildable()))))
 
-        create_notice_file(ndk_dir / "NOTICE", ndk.builds.NoticeGroup.BASE)
         create_notice_file(
-            ndk_dir / "NOTICE.toolchain", ndk.builds.NoticeGroup.TOOLCHAIN
-        )
-        check_ndk_symlinks(ndk_dir, args.system)
+            os.path.join(ndk_dir, 'NOTICE'),
+            ndk.builds.NoticeGroup.BASE)
+        create_notice_file(
+            os.path.join(ndk_dir, 'NOTICE.toolchain'),
+            ndk.builds.NoticeGroup.TOOLCHAIN)
         return ndk_dir
     finally:
         workqueue.terminate()
         workqueue.join()
 
 
-def build_ndk_for_cross_compile(out_dir: Path, args: argparse.Namespace) -> None:
+def build_ndk_for_cross_compile(out_dir: str, arches: List[ndk.abis.Arch],
+                                args: argparse.Namespace) -> None:
     args = copy.deepcopy(args)
-    args.system = Host.current()
-    if args.system != Host.Linux:
+    args.system = ndk.hosts.get_default_host()
+    if args.system != ndk.hosts.Host.Linux:
         raise NotImplementedError
     module_names = NAMES_TO_MODULES.keys()
-    modules, deps_only = get_modules_to_build(module_names)
-    print("Building Linux modules: {}".format(" ".join([str(m) for m in modules])))
+    modules, deps_only = get_modules_to_build(module_names, arches)
+    print('Building Linux modules: {}'.format(' '.join(
+        [str(m) for m in modules])))
     build_ndk(modules, deps_only, out_dir, out_dir, args)
 
 
-def create_ndk_symlink(out_dir: Path) -> None:
+def create_ndk_symlink(out_dir: str) -> None:
     this_host_ndk = ndk.paths.get_install_path()
-    ndk_symlink = out_dir / this_host_ndk.name
-    if not ndk_symlink.exists():
+    ndk_symlink = os.path.join(out_dir, os.path.basename(this_host_ndk))
+    if not os.path.exists(ndk_symlink):
         os.symlink(this_host_ndk, ndk_symlink)
 
 
-def get_directory_size(path: Path) -> int:
-    du_str = subprocess.check_output(["du", "-sm", str(path)])
-    match = re.match(r"^(\d+)", du_str.decode("utf-8"))
+def get_directory_size(path: str) -> int:
+    du_str = subprocess.check_output(['du', '-sm', path])
+    match = re.match(r'^(\d+)', du_str.decode('utf-8'))
     if match is None:
-        raise RuntimeError(f"Could not determine the size of {path}")
+        raise RuntimeError(f'Could not determine the size of {path}')
     size_str = match.group(1)
     return int(size_str)
 
 
 def main() -> None:
+    logging.basicConfig()
+
     total_timer = ndk.timer.Timer()
     total_timer.start()
 
     args, module_names = parse_args()
-
-    ensure_python_environment()
-
-    if args.verbosity >= 2:
-        logging.basicConfig(level=logging.DEBUG)
-    elif args.verbosity == 1:
-        logging.basicConfig(level=logging.INFO)
-    else:
-        logging.basicConfig()
-
     module_names.extend(args.modules)
     if not module_names:
         module_names = get_all_module_names()
 
     required_package_modules = set(get_all_module_names())
     have_required_modules = required_package_modules <= set(module_names)
-
-    if args.package_tests is None:
-        args.package_tests = args.package
+    do_package = have_required_modules if args.package else False
+    if args.force_package:
+        do_package = True
 
     # TODO(danalbert): wine?
     # We're building the Windows packages from Linux, so we can't actually run
     # any of the tests from here.
-    if args.system.is_windows or not have_required_modules:
+    if args.system.is_windows or not do_package:
         args.build_tests = False
 
-    os.chdir(Path(__file__).resolve().parent.parent)
+    os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
 
     # Set ANDROID_BUILD_TOP.
-    if "ANDROID_BUILD_TOP" in os.environ:
-        sys.exit(
-            textwrap.dedent(
-                """\
+    if 'ANDROID_BUILD_TOP' in os.environ:
+        sys.exit(textwrap.dedent("""\
             Error: ANDROID_BUILD_TOP is already set in your environment.
 
-            This typically means you are running in a shell that has launched a
+            This typically means you are running in a shell that has lunched a
             target in a platform build. The platform environment interferes
             with the NDK build environment, so the build cannot continue.
 
-            Launch a new shell before building the NDK."""
-            )
-        )
+            Launch a new shell before building the NDK."""))
 
-    os.environ["ANDROID_BUILD_TOP"] = str(ndk.paths.android_path())
+    os.environ['ANDROID_BUILD_TOP'] = ndk.paths.android_path()
+
+    arches = list(ndk.abis.ALL_ARCHITECTURES)
+    if args.arch is not None:
+        arches = [args.arch]
 
     out_dir = ndk.paths.get_out_dir()
-    dist_dir = ndk.paths.get_dist_dir()
+    dist_dir = ndk.paths.get_dist_dir(out_dir)
 
-    print("Machine has {} CPUs".format(multiprocessing.cpu_count()))
+    print('Machine has {} CPUs'.format(multiprocessing.cpu_count()))
 
     if args.system.is_windows and not args.skip_deps:
         # Since the Windows NDK is cross compiled, we need to build a Linux NDK
         # first so we can build components like libc++.
-        build_ndk_for_cross_compile(Path(out_dir), args)
+        build_ndk_for_cross_compile(out_dir, arches, args)
 
-    modules, deps_only = get_modules_to_build(module_names)
-    print(
-        "Building modules: {}".format(
-            " ".join(
-                [str(m) for m in modules if not args.skip_deps or m not in deps_only]
-            )
-        )
-    )
+    modules, deps_only = get_modules_to_build(module_names, arches)
+    print('Building modules: {}'.format(' '.join(
+        [str(m) for m in modules
+         if not args.skip_deps or m not in deps_only])))
 
     build_timer = ndk.timer.Timer()
     with build_timer:
@@ -2524,41 +2988,39 @@
 
     package_timer = ndk.timer.Timer()
     with package_timer:
-        if args.package:
-            print("Packaging NDK...")
+        if do_package:
+            print('Packaging NDK...')
+            host_tag = ndk.hosts.host_to_tag(args.system)
             # NB: Purging of unwanted files (.pyc, Android.bp, etc) happens as
             # part of packaging. If testing is ever moved to happen before
             # packaging, ensure that the directory is purged before and after
             # building the tests.
             package_path = package_ndk(
-                ndk_dir, out_dir, dist_dir, args.system, args.build_number
-            )
-            packaged_size_bytes = package_path.stat().st_size
-            packaged_size = packaged_size_bytes // (2**20)
+                ndk_dir, dist_dir, host_tag, args.build_number)
+            packaged_size_bytes = os.path.getsize(package_path)
+            packaged_size = packaged_size_bytes // (2 ** 20)
 
     good = True
     test_timer = ndk.timer.Timer()
     with test_timer:
         if args.build_tests:
-            print("Building tests...")
-            purge_unwanted_files(ndk_dir)
             good = build_ndk_tests(out_dir, dist_dir, args)
             print()  # Blank line between test results and timing data.
 
     total_timer.finish()
 
-    print("")
-    print("Installed size: {} MiB".format(installed_size))
-    if args.package:
-        print("Package size: {} MiB".format(packaged_size))
-    print("Finished {}".format("successfully" if good else "unsuccessfully"))
-    print("Build: {}".format(build_timer.duration))
-    print("Packaging: {}".format(package_timer.duration))
-    print("Testing: {}".format(test_timer.duration))
-    print("Total: {}".format(total_timer.duration))
+    print('')
+    print('Installed size: {} MiB'.format(installed_size))
+    if do_package:
+        print('Package size: {} MiB'.format(packaged_size))
+    print('Finished {}'.format('successfully' if good else 'unsuccessfully'))
+    print('Build: {}'.format(build_timer.duration))
+    print('Packaging: {}'.format(package_timer.duration))
+    print('Testing: {}'.format(test_timer.duration))
+    print('Total: {}'.format(total_timer.duration))
 
-    subject = "NDK Build {}!".format("Passed" if good else "Failed")
-    body = "Build finished in {}".format(total_timer.duration)
+    subject = 'NDK Build {}!'.format('Passed' if good else 'Failed')
+    body = 'Build finished in {}'.format(total_timer.duration)
     ndk.notify.toast(subject, body)
 
     sys.exit(not good)
@@ -2591,5 +3053,5 @@
         main()
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     _run_main_in_new_process_group()
diff --git a/ndk/cmake.py b/ndk/cmake.py
deleted file mode 100644
index 29555a7..0000000
--- a/ndk/cmake.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""APIs for dealing with cmake scripts."""
-
-import os
-import pprint
-import shlex
-import shutil
-import subprocess
-from functools import cached_property
-from pathlib import Path
-from typing import Dict, List, Optional
-
-import ndk.paths
-import ndk.toolchains
-from ndk.hosts import Host
-
-SYSTEM_NAME_MAP = {
-    Host.Darwin: "Darwin",
-    Host.Linux: "Linux",
-    Host.Windows64: "Windows",
-}
-
-HOST_TRIPLE_MAP = {
-    Host.Linux: "x86_64-linux-gnu",
-    Host.Windows64: "x86_64-w64-mingw32",
-}
-
-
-def find_cmake() -> Path:
-    host = Host.current()
-    return (
-        ndk.paths.ANDROID_DIR
-        / "prebuilts"
-        / "cmake"
-        / host.platform_tag
-        / "bin"
-        / "cmake"
-    ).with_suffix(host.exe_suffix)
-
-
-def find_ninja() -> Path:
-    host = Host.current()
-    return (
-        ndk.paths.ANDROID_DIR / "prebuilts" / "ninja" / host.platform_tag / "ninja"
-    ).with_suffix(host.exe_suffix)
-
-
-class CMakeBuilder:
-    """Builder for an cmake project."""
-
-    toolchain: ndk.toolchains.Toolchain
-
-    def __init__(
-        self,
-        src_path: Path,
-        build_dir: Path,
-        host: Host,
-        additional_flags: Optional[List[str]] = None,
-        additional_ldflags: Optional[List[str]] = None,
-        additional_env: Optional[Dict[str, str]] = None,
-        run_ctest: bool = False,
-    ) -> None:
-        """Initializes an autoconf builder.
-
-        Args:
-            src_path: Path to the cmake project.
-            build_dir: Directory to use for building. If the directory exists,
-            it will be deleted and recreated to ensure the build is correct.
-            host: Host to be used for the --host argument (the
-                cross-compilation target).
-            additional_flags: Additional flags to pass to the compiler.
-            additional_env: Additional environment to set, used during
-                configure, build, and install.
-        """
-        self.src_path = src_path
-        self.build_directory = build_dir
-        self.host = host
-        self.additional_flags = additional_flags
-        self.additional_ldflags = additional_ldflags
-        self.additional_env = additional_env
-        self.run_ctest = run_ctest
-
-        self.working_directory = self.build_directory / "build"
-        self.install_directory = self.build_directory / "install"
-
-        self.toolchain = ndk.toolchains.ClangToolchain(self.host)
-
-    @property
-    def flags(self) -> List[str]:
-        """Returns default cflags for the target."""
-        # TODO: Are these the flags we want? These are what we've used
-        # historically.
-        flags = [
-            "-Os",
-            "-fomit-frame-pointer",
-            "-s",
-        ]
-        if not self.host == Host.Darwin:
-            flags.append("-fuse-ld=lld")
-        if self.additional_flags:
-            flags.extend(self.additional_flags)
-        return flags
-
-    @property
-    def ldflags(self) -> List[str]:
-        ldflags = []
-        if self.additional_ldflags:
-            ldflags.extend(self.additional_ldflags)
-        return ldflags
-
-    def _run(self, cmd: List[str]) -> None:
-        """Runs and logs execution of a subprocess."""
-        subproc_env = dict(os.environ)
-        if self.additional_env:
-            subproc_env.update(self.additional_env)
-
-        pp_cmd = shlex.join(cmd)
-        if subproc_env != dict(os.environ):
-            pp_env = pprint.pformat(self.additional_env, indent=4)
-            print("Running: {} with env:\n{}".format(pp_cmd, pp_env))
-        else:
-            print("Running: {}".format(pp_cmd))
-
-        subprocess.check_call(cmd, env=subproc_env, cwd=self.working_directory)
-
-    @cached_property
-    def _cmake(self) -> Path:
-        return find_cmake()
-
-    @cached_property
-    def _ninja(self) -> Path:
-        return find_ninja()
-
-    @property
-    def _ctest(self) -> Path:
-        host = Host.current()
-        return (
-            ndk.paths.ANDROID_DIR
-            / "prebuilts"
-            / "cmake"
-            / host.platform_tag
-            / "bin"
-            / "ctest"
-        ).with_suffix(host.exe_suffix)
-
-    @property
-    def cmake_defines(self) -> Dict[str, str]:
-        """CMake defines."""
-        flags = self.toolchain.flags + self.flags
-        cflags = " ".join(flags)
-        cxxflags = " ".join(flags + ["-stdlib=libc++"])
-        ldflags = " ".join(self.ldflags)
-        defines: Dict[str, str] = {
-            "CMAKE_C_COMPILER": str(self.toolchain.cc),
-            "CMAKE_CXX_COMPILER": str(self.toolchain.cxx),
-            "CMAKE_AR": str(self.toolchain.ar),
-            "CMAKE_RANLIB": str(self.toolchain.ranlib),
-            "CMAKE_NM": str(self.toolchain.nm),
-            "CMAKE_STRIP": str(self.toolchain.strip),
-            "CMAKE_LINKER": str(self.toolchain.ld),
-            "CMAKE_ASM_FLAGS": cflags,
-            "CMAKE_C_FLAGS": cflags,
-            "CMAKE_CXX_FLAGS": cxxflags,
-            "CMAKE_EXE_LINKER_FLAGS": ldflags,
-            "CMAKE_SHARED_LINKER_FLAGS": ldflags,
-            "CMAKE_MODULE_LINKER_FLAGS": ldflags,
-            "CMAKE_BUILD_TYPE": "Release",
-            "CMAKE_INSTALL_PREFIX": str(self.install_directory),
-            "CMAKE_MAKE_PROGRAM": str(self._ninja),
-            "CMAKE_SYSTEM_NAME": SYSTEM_NAME_MAP[self.host],
-            "CMAKE_SYSTEM_PROCESSOR": "x86_64",
-            "CMAKE_FIND_ROOT_PATH_MODE_INCLUDE": "ONLY",
-            "CMAKE_FIND_ROOT_PATH_MODE_LIBRARY": "ONLY",
-            "CMAKE_FIND_ROOT_PATH_MODE_PACKAGE": "ONLY",
-            "CMAKE_FIND_ROOT_PATH_MODE_PROGRAM": "NEVER",
-        }
-        if self.host.is_windows:
-            defines["CMAKE_RC"] = str(self.toolchain.rescomp)
-        if self.host == Host.Darwin:
-            defines["CMAKE_OSX_ARCHITECTURES"] = "x86_64;arm64"
-        else:
-            defines["CMAKE_C_COMPILER_TARGET"] = HOST_TRIPLE_MAP[self.host]
-            defines["CMAKE_CXX_COMPILER_TARGET"] = HOST_TRIPLE_MAP[self.host]
-        return defines
-
-    def clean(self) -> None:
-        """Cleans output directory.
-
-        If necessary, existing output directory will be removed. After
-        removal, the inner directories (working directory, install directory,
-        and toolchain directory) will be created.
-        """
-        if self.build_directory.exists():
-            shutil.rmtree(self.build_directory)
-
-        self.working_directory.mkdir(parents=True)
-        self.install_directory.mkdir(parents=True)
-
-    def configure(self, additional_defines: Dict[str, str]) -> None:
-        """Invokes cmake configure."""
-        cmake_cmd = [str(self._cmake), "-GNinja"]
-        defines = self.cmake_defines
-        defines.update(additional_defines)
-        cmake_cmd.extend(f"-D{key}={val}" for key, val in defines.items())
-        cmake_cmd.append(str(self.src_path))
-
-        self._run(cmake_cmd)
-
-    def make(self) -> None:
-        """Builds the project."""
-        self._run([str(self._ninja)])
-
-    def test(self) -> None:
-        """Runs tests."""
-        self._run([str(self._ctest), "--verbose"])
-
-    def install(self) -> None:
-        """Installs the project."""
-        self._run([str(self._ninja), "install/strip"])
-
-    def build(self, additional_defines: Optional[Dict[str, str]] = None) -> None:
-        """Configures and builds an cmake project.
-
-        Args:
-            configure_args: List of arguments to be passed to configure. Does
-                not need to include --prefix, --build, or --host. Those are set
-                up automatically.
-        """
-        self.clean()
-        self.configure({} if additional_defines is None else additional_defines)
-        self.make()
-        if not self.host.is_windows and self.run_ctest:
-            self.test()
-        self.install()
diff --git a/ndk/config.py b/ndk/config.py
index 12f3815..06af5d9 100644
--- a/ndk/config.py
+++ b/ndk/config.py
@@ -1,12 +1,13 @@
 from __future__ import print_function
 
-major = 27
+
+major = 21
 hotfix = 0
-hotfix_str = chr(ord("a") + hotfix) if hotfix else ""
-beta = 1
-beta_str = "-beta{}".format(beta) if beta > 0 else ""
+hotfix_str = chr(ord('a') + hotfix) if hotfix else ''
+beta = 2
+beta_str = '-beta{}'.format(beta) if beta > 0 else ''
 canary = False
-canary_str = "-canary" if canary else ""
-release = "r{}{}{}{}".format(major, hotfix_str, beta_str, canary_str)
-if __name__ == "__main__":
+canary_str = '-canary' if canary else ''
+release = 'r{}{}{}{}'.format(major, hotfix_str, beta_str, canary_str)
+if __name__ == '__main__':
     print(release)
diff --git a/ndk/crtobjectbuilder.py b/ndk/crtobjectbuilder.py
deleted file mode 100644
index 09f1c96..0000000
--- a/ndk/crtobjectbuilder.py
+++ /dev/null
@@ -1,166 +0,0 @@
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Helper class for building CRT objects."""
-import shlex
-import shutil
-import subprocess
-from pathlib import Path
-
-import ndk.config
-from ndk.platforms import ALL_API_LEVELS
-
-from .abis import Abi, abi_to_triple, clang_target, iter_abis_for_api
-from .paths import ANDROID_DIR, NDK_DIR
-
-
-class CrtObjectBuilder:
-    """Builder for NDK CRT objects."""
-
-    PREBUILTS_PATH = ANDROID_DIR / "prebuilts/ndk/platform"
-
-    def __init__(self, llvm_path: Path, build_dir: Path, build_id: int) -> None:
-        self.llvm_path = llvm_path
-        self.build_dir = build_dir
-        self.build_id = build_id
-        self.artifacts: list[tuple[Abi, int, Path]] = []
-
-    def llvm_tool(self, tool: str) -> Path:
-        """Returns the path to the given LLVM tool."""
-        return self.llvm_path / "bin" / tool
-
-    def get_build_cmd(
-        self,
-        dst: Path,
-        srcs: list[Path],
-        api: int,
-        abi: Abi,
-        build_number: int,
-    ) -> list[str]:
-        """Returns the build command for creating a CRT object."""
-        libc_includes = ANDROID_DIR / "bionic/libc"
-        arch_common_includes = libc_includes / "arch-common/bionic"
-
-        cc = self.llvm_tool("clang")
-
-        args = [
-            str(cc),
-            "-target",
-            clang_target(abi, api),
-            "--sysroot",
-            str(self.PREBUILTS_PATH / "sysroot"),
-            "-fuse-ld=lld",
-            f"-I{libc_includes}",
-            f"-I{arch_common_includes}",
-            f"-DPLATFORM_SDK_VERSION={api}",
-            f'-DABI_NDK_VERSION="{ndk.config.release}"',
-            f'-DABI_NDK_BUILD_NUMBER="{build_number}"',
-            "-O2",
-            "-fpic",
-            "-Wl,-r",
-            "-no-pie",
-            "-nostdlib",
-            "-Wa,--noexecstack",
-            "-Wl,-z,noexecstack",
-            "-o",
-            str(dst),
-        ] + [str(src) for src in srcs]
-
-        if abi == Abi("arm64-v8a"):
-            args.append("-mbranch-protection=standard")
-
-        if dst.name == "crtbegin_static.o":
-            args.append("-DCRTBEGIN_STATIC")
-
-        return args
-
-    def check_elf_note(self, obj_file: Path) -> None:
-        """Verifies that the object file contains the expected note."""
-        # readelf is a cross platform tool, so arch doesn't matter.
-        readelf = self.llvm_tool("llvm-readelf")
-        out = subprocess.run(
-            [readelf, "--notes", obj_file], check=True, text=True, capture_output=True
-        ).stdout
-        if "Android" not in out:
-            raise RuntimeError(f"{obj_file} does not contain NDK ELF note")
-
-    def build_crt_object(
-        self,
-        dst: Path,
-        srcs: list[Path],
-        api: int,
-        abi: Abi,
-        build_number: int,
-        defines: list[str],
-    ) -> None:
-        cc_args = self.get_build_cmd(dst, srcs, api, abi, build_number)
-        cc_args.extend(defines)
-
-        print(f"Running: {shlex.join(cc_args)}")
-        subprocess.check_call(cc_args)
-
-    def build_crt_objects(
-        self,
-        dst_dir: Path,
-        api: int,
-        abi: Abi,
-        build_number: int,
-    ) -> None:
-        src_dir = ANDROID_DIR / "bionic/libc/arch-common/bionic"
-        crt_brand = NDK_DIR / "sources/crt/crtbrand.S"
-
-        objects = {
-            "crtbegin_dynamic.o": [
-                src_dir / "crtbegin.c",
-                crt_brand,
-            ],
-            "crtbegin_so.o": [
-                src_dir / "crtbegin_so.c",
-                crt_brand,
-            ],
-            "crtbegin_static.o": [
-                src_dir / "crtbegin.c",
-                crt_brand,
-            ],
-            "crtend_android.o": [
-                src_dir / "crtend.S",
-            ],
-            "crtend_so.o": [
-                src_dir / "crtend_so.S",
-            ],
-        }
-
-        for name, srcs in objects.items():
-            dst_path = dst_dir / name
-            defs = []
-            if name == "crtbegin_static.o":
-                # libc.a is always the latest version, so ignore the API level
-                # setting for crtbegin_static.
-                defs.append("-D_FORCE_CRT_ATFORK")
-            self.build_crt_object(dst_path, srcs, api, abi, build_number, defs)
-            if name.startswith("crtbegin"):
-                self.check_elf_note(dst_path)
-            self.artifacts.append((abi, api, dst_path))
-
-    def build(self) -> None:
-        self.artifacts = []
-        if self.build_dir.exists():
-            shutil.rmtree(self.build_dir)
-
-        for api in ALL_API_LEVELS:
-            for abi in iter_abis_for_api(api):
-                dst_dir = self.build_dir / abi_to_triple(abi) / str(api)
-                dst_dir.mkdir(parents=True, exist_ok=True)
-                self.build_crt_objects(dst_dir, api, abi, self.build_id)
diff --git a/ndk/debug.py b/ndk/debug.py
index ddd4019..256cbf0 100644
--- a/ndk/debug.py
+++ b/ndk/debug.py
@@ -19,19 +19,18 @@
 import sys
 import traceback
 from types import FrameType
-from typing import Optional
 
 
-def attach_debugger(_signum: int, frame: Optional[FrameType]) -> None:
+def attach_debugger(_signum: int, frame: FrameType) -> None:
     """Attaches pdb to the frame at the time of signalling."""
     # mypy doesn't know that pdb.Pdb exists.
-    pdb.Pdb().set_trace(frame)
+    pdb.Pdb().set_trace(frame)  # type: ignore
 
 
-def dump_trace(_signum: int, frame: Optional[FrameType]) -> None:
+def dump_trace(_signum: int, frame: FrameType) -> None:
     """Dumps a stack trace of the frame at the time of signalling."""
-    msg = "Traceback:\n"
-    msg += "".join(traceback.format_stack(frame))
+    msg = 'Traceback:\n'
+    msg += ''.join(traceback.format_stack(frame))
     sys.stderr.write(msg)
 
 
diff --git a/ndk/deps.py b/ndk/deps.py
index adbdfb8..c66f300 100644
--- a/ndk/deps.py
+++ b/ndk/deps.py
@@ -16,20 +16,16 @@
 """Performs dependency tracking for ndk.builds modules."""
 from typing import Dict, Iterable, List, Set
 
-import ndk.graph
 from ndk.builds import Module
+import ndk.graph
 
 
 class CyclicDependencyError(RuntimeError):
     """An error indicating a cyclic dependency in the module graph."""
-
     def __init__(self, modules: Iterable[ndk.graph.Node]) -> None:
         """Initializes a CyclicDependencyError."""
-        super().__init__(
-            "Detected cyclic dependency: {}".format(
-                " -> ".join([m.name for m in modules])
-            )
-        )
+        super().__init__('Detected cyclic dependency: {}'.format(' -> '.join(
+            [m.name for m in modules])))
 
 
 def prove_acyclic(modules: Iterable[Module]) -> None:
@@ -60,7 +56,6 @@
     the DependencyManager is informed of a module build being completed via
     DependencyManager.complete().
     """
-
     def __init__(self, all_modules: Iterable[Module]) -> None:
         """Initializes a DependencyManager."""
         if not all_modules:
@@ -77,7 +72,8 @@
         # Reverse map from a module to all of its dependents used to speed up
         # lookups.
         self.deps_to_modules: Dict[str, List[Module]] = {
-            m.name: [] for m in all_modules
+            m.name: []
+            for m in all_modules
         }
         for module in all_modules:
             for dep in module.deps:
diff --git a/ndk/ext/os.py b/ndk/ext/os.py
index 8239012..395e7d5 100644
--- a/ndk/ext/os.py
+++ b/ndk/ext/os.py
@@ -18,12 +18,14 @@
 
 import contextlib
 import os
-from pathlib import Path
-from typing import ContextManager, Iterator, MutableMapping
+from typing import ContextManager, MutableMapping, Iterator
 
 
 @contextlib.contextmanager
-def cd(path: Path) -> Iterator[None]:
+def cd(path: str) -> Iterator[None]:
+    # For some reason pylint can't detect that getcwd/chdir are in os because
+    # we're ndk.ext.os, despite the fact that os.environ is fine.
+    # pylint: disable=no-member
     curdir = os.getcwd()
     os.chdir(path)
     try:
@@ -47,10 +49,10 @@
         os.environ = env  # type: ignore
         yield
     finally:
-        os.environ = old_environ  # type: ignore
+        os.environ = old_environ # type: ignore
 
 
-def modify_environ(env: MutableMapping[str, str]) -> ContextManager[None]:
+def modify_environ(env: MutableMapping[str, str]) -> ContextManager:
     """Extends os.environ with the values in env, restoring on context exit.
 
     The values in env add to the existing environment rather than completely
diff --git a/ndk/ext/shutil.py b/ndk/ext/shutil.py
new file mode 100644
index 0000000..d549532
--- /dev/null
+++ b/ndk/ext/shutil.py
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Extensions for shutil APIs."""
+from __future__ import absolute_import
+
+import errno
+import os
+
+
+def create_directory(path: str) -> None:
+    """Creates a directory, ignoring errors if the directory exists."""
+    try:
+        os.makedirs(path)  # pylint: disable=no-member
+    except OSError as ex:
+        if ex.errno != errno.EEXIST:
+            raise
diff --git a/ndk/ext/subprocess.py b/ndk/ext/subprocess.py
index bf3b30b..24ecdcf 100644
--- a/ndk/ext/subprocess.py
+++ b/ndk/ext/subprocess.py
@@ -14,15 +14,14 @@
 # limitations under the License.
 #
 """Helpers for subprocess APIs."""
-from __future__ import annotations
+from __future__ import absolute_import
 
 import logging
+import os
 import subprocess
-import sys
-from collections.abc import Iterator
-from contextlib import contextmanager
 from typing import Any, Sequence, Tuple
 
+
 # TODO: Remove in favor of subprocess.run.
 
 
@@ -31,28 +30,26 @@
     return logging.getLogger(__name__)
 
 
-def _call_output_inner(
-    cmd: Sequence[str], *args: Any, **kwargs: Any
-) -> Tuple[int, Any]:
+def _call_output_inner(cmd: Sequence[str], *args: Any,
+                       **kwargs: Any) -> Tuple[int, Any]:
     """Does the real work of call_output.
 
     This inner function does the real work and the outer function handles the
     OS specific stuff (Windows needs to handle WindowsError, but that isn't
     defined on non-Windows systems).
     """
-    logger().info("Popen: %s", " ".join(cmd))
-    kwargs.update(
-        {
-            "stdout": subprocess.PIPE,
-            "stderr": subprocess.STDOUT,
-        }
-    )
-    with subprocess.Popen(cmd, *args, **kwargs) as proc:
-        out, _ = proc.communicate()
-        return proc.returncode, out
+    logger().info('Popen: %s', ' '.join(cmd))
+    kwargs.update({
+        'stdout': subprocess.PIPE,
+        'stderr': subprocess.STDOUT,
+    })
+    proc = subprocess.Popen(cmd, *args, **kwargs)
+    out, _ = proc.communicate()
+    return proc.returncode, out
 
 
-def call_output(cmd: Sequence[str], *args: Any, **kwargs: Any) -> Tuple[int, Any]:
+def call_output(cmd: Sequence[str], *args: Any,
+                **kwargs: Any) -> Tuple[int, Any]:
     """Invoke the specified command and return exit code and output.
 
     This is the missing subprocess.call_output, which is the combination of
@@ -63,22 +60,10 @@
 
     Returns: Tuple of (exit_code, output).
     """
-    if sys.platform == "win32":
+    if os.name == 'nt':
         try:
             return _call_output_inner(cmd, *args, **kwargs)
         except WindowsError as error:  # pylint: disable=undefined-variable
             return error.winerror, error.strerror
     else:
         return _call_output_inner(cmd, *args, **kwargs)
-
-
-@contextmanager
-def verbose_subprocess_errors() -> Iterator[None]:
-    try:
-        yield
-    except subprocess.CalledProcessError as ex:
-        if ex.stdout is not None:
-            ex.add_note(f"stdout:\n{ex.stdout}")
-        if ex.stderr is not None:
-            ex.add_note(f"stderr:\n{ex.stderr}")
-        raise
diff --git a/ndk/ext/test_os.py b/ndk/ext/test_os.py
index d017302..f06a102 100644
--- a/ndk/ext/test_os.py
+++ b/ndk/ext/test_os.py
@@ -24,30 +24,30 @@
 
 class OsTest(unittest.TestCase):
     def test_replace_environ(self) -> None:
-        self.assertIn("PATH", os.environ)
-        old_path = os.environ["PATH"]
-        self.assertNotIn("FOO", os.environ)
+        self.assertIn('PATH', os.environ)
+        old_path = os.environ['PATH']
+        self.assertNotIn('FOO', os.environ)
 
-        with ndk.ext.os.replace_environ({"FOO": "bar"}):
-            self.assertNotIn("PATH", os.environ)
-            self.assertIn("FOO", os.environ)
-            self.assertEqual(os.environ["FOO"], "bar")
+        with ndk.ext.os.replace_environ({'FOO': 'bar'}):
+            self.assertNotIn('PATH', os.environ)
+            self.assertIn('FOO', os.environ)
+            self.assertEqual(os.environ['FOO'], 'bar')
 
-        self.assertIn("PATH", os.environ)
-        self.assertEqual(os.environ["PATH"], old_path)
-        self.assertNotIn("FOO", os.environ)
+        self.assertIn('PATH', os.environ)
+        self.assertEqual(os.environ['PATH'], old_path)
+        self.assertNotIn('FOO', os.environ)
 
     def test_modify_environ(self) -> None:
-        self.assertIn("PATH", os.environ)
-        old_path = os.environ["PATH"]
-        self.assertNotIn("FOO", os.environ)
+        self.assertIn('PATH', os.environ)
+        old_path = os.environ['PATH']
+        self.assertNotIn('FOO', os.environ)
 
-        with ndk.ext.os.modify_environ({"FOO": "bar"}):
-            self.assertIn("PATH", os.environ)
-            self.assertEqual(os.environ["PATH"], old_path)
-            self.assertIn("FOO", os.environ)
-            self.assertEqual(os.environ["FOO"], "bar")
+        with ndk.ext.os.modify_environ({'FOO': 'bar'}):
+            self.assertIn('PATH', os.environ)
+            self.assertEqual(os.environ['PATH'], old_path)
+            self.assertIn('FOO', os.environ)
+            self.assertEqual(os.environ['FOO'], 'bar')
 
-        self.assertIn("PATH", os.environ)
-        self.assertEqual(os.environ["PATH"], old_path)
-        self.assertNotIn("FOO", os.environ)
+        self.assertIn('PATH', os.environ)
+        self.assertEqual(os.environ['PATH'], old_path)
+        self.assertNotIn('FOO', os.environ)
diff --git a/ndk/ext/test_subprocess.py b/ndk/ext/test_subprocess.py
deleted file mode 100644
index 6cc5617..0000000
--- a/ndk/ext/test_subprocess.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#
-# Copyright (C) 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Tests for ndk.ext.subprocess."""
-from __future__ import absolute_import
-
-import textwrap
-import traceback
-from subprocess import CalledProcessError
-
-import pytest
-
-import ndk.ext.subprocess
-
-
-class TestVerboseSubprocessErrors:
-    def test_capture_both(self) -> None:
-        with pytest.raises(CalledProcessError) as excinfo:
-            with ndk.ext.subprocess.verbose_subprocess_errors():
-                raise CalledProcessError(1, ["test"], "foo", "bar")
-        assert (
-            textwrap.dedent(
-                """\
-                subprocess.CalledProcessError: Command '['test']' returned non-zero exit status 1.
-                stdout:
-                foo
-                stderr:
-                bar
-                """
-            )
-            == "".join(traceback.format_exception_only(excinfo.value))
-        )
-
-    def test_capture_stdout(self) -> None:
-        with pytest.raises(CalledProcessError) as excinfo:
-            with ndk.ext.subprocess.verbose_subprocess_errors():
-                raise CalledProcessError(1, ["test"], "foo", None)
-        assert (
-            textwrap.dedent(
-                """\
-                subprocess.CalledProcessError: Command '['test']' returned non-zero exit status 1.
-                stdout:
-                foo
-                """
-            )
-            == "".join(traceback.format_exception_only(excinfo.value))
-        )
-
-    def test_capture_stderr(self) -> None:
-        with pytest.raises(CalledProcessError) as excinfo:
-            with ndk.ext.subprocess.verbose_subprocess_errors():
-                raise CalledProcessError(1, ["test"], None, "bar")
-        assert (
-            textwrap.dedent(
-                """\
-                subprocess.CalledProcessError: Command '['test']' returned non-zero exit status 1.
-                stderr:
-                bar
-                """
-            )
-            == "".join(traceback.format_exception_only(excinfo.value))
-        )
-
-    def test_capture_neither(self) -> None:
-        with pytest.raises(CalledProcessError) as excinfo:
-            with ndk.ext.subprocess.verbose_subprocess_errors():
-                raise CalledProcessError(1, ["test"], None, None)
-        assert (
-            textwrap.dedent(
-                """\
-                subprocess.CalledProcessError: Command '['test']' returned non-zero exit status 1.
-                """
-            )
-            == "".join(traceback.format_exception_only(excinfo.value))
-        )
diff --git a/ndk/file.py b/ndk/file.py
new file mode 100644
index 0000000..f2a6738
--- /dev/null
+++ b/ndk/file.py
@@ -0,0 +1,28 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Contains file I/O APIs."""
+
+
+def read_file(path: str) -> str:
+    """Reads the contents of a file into a string, closing the file."""
+    with open(path) as the_file:
+        return the_file.read()
+
+
+def write_file(path: str, contents: str) -> None:
+    """Writes the given string to the path specified, closing the file."""
+    with open(path, 'w') as the_file:
+        the_file.write(contents)
diff --git a/ndk/graph.py b/ndk/graph.py
index 3a2b508..4b86eec 100644
--- a/ndk/graph.py
+++ b/ndk/graph.py
@@ -22,7 +22,7 @@
 class Node:
     """A node in a directed graph."""
 
-    def __init__(self, name: str, outs: Iterable["Node"]) -> None:
+    def __init__(self, name: str, outs: Iterable['Node']) -> None:
         """Initializes a Node.
 
         Args:
@@ -39,7 +39,7 @@
         assert isinstance(other, Node)
         return self.name == other.name
 
-    def __lt__(self, other: object) -> bool:
+    def __le__(self, other: object) -> bool:
         assert isinstance(other, Node)
         return self.name < other.name
 
@@ -73,8 +73,10 @@
         return None
 
     def find_cycle_from_node(
-        self, node: Node, visited: Set[Node], path: Optional[List[Node]] = None
-    ) -> Optional[List[Node]]:
+            self,
+            node: Node,
+            visited: Set[Node],
+            path: Optional[List[Node]] = None) -> Optional[List[Node]]:
         """Finds a cycle from a given node if there is one.
 
         Performs a recursive depth-first search to see if there are any cycles
@@ -102,7 +104,7 @@
 
         path.append(node)
         if node in path[:-1]:
-            return path[path.index(node) :]
+            return path[path.index(node):]
 
         if node in visited:
             path.pop()
diff --git a/ndk/hosts.py b/ndk/hosts.py
index e601ebf..a2759f2 100644
--- a/ndk/hosts.py
+++ b/ndk/hosts.py
@@ -14,7 +14,7 @@
 # limitations under the License.
 #
 """Constants and helper functions for NDK hosts."""
-from __future__ import annotations
+from __future__ import absolute_import
 
 import enum
 import sys
@@ -24,10 +24,10 @@
 class Host(enum.Enum):
     """Enumeration of supported hosts."""
 
-    Darwin = "darwin"
-    Linux = "linux"
+    Darwin = 'darwin'
+    Linux = 'linux'
     # TODO: Just Windows now that we only have the one.
-    Windows64 = "windows64"
+    Windows64 = 'windows64'
 
     # TODO: Remove.
     @property
@@ -35,74 +35,23 @@
         """Returns True if the given host is Windows."""
         return self == Host.Windows64
 
-    @property
-    def tag(self) -> str:
-        return host_to_tag(self)
 
-    @property
-    def platform_tag(self) -> str:
-        """Returns the tag used for this host in the platform tree.
-
-        The NDK uses full architecture names like x86_64, whereas the platform
-        has always used just x86, even for the 64-bit tools.
-        """
-        if self is Host.Windows64:
-            # The value for this is still "windows64" since we historically
-            # supported 32-bit Windows. Can clean this up if we ever fix the
-            # value of the enum.
-            return "windows-x86"
-        return f"{self.value}-x86"
-
-    @property
-    def exe_suffix(self) -> str:
-        if self is Host.Windows64:
-            return ".exe"
-        return ""
-
-    @classmethod
-    def current(cls) -> Host:
-        """Returns the Host matching the current machine."""
-        # Mypy is rather picky about how these are written. `startswith` and `==` work
-        # fine, but `in` behaves differently. The pattern here comes straight from the
-        # mypy docs, so better work.
-        # https://mypy.readthedocs.io/en/stable/common_issues.html#version-and-platform-checks
-        #
-        # But of course pylint thinks we *shouldn't* do that...
-        # pylint: disable=no-else-return
-        if sys.platform == "linux":
-            return Host.Linux
-        elif sys.platform == "darwin":
-            return Host.Darwin
-        elif sys.platform == "win32":
-            return Host.Windows64
-        else:
-            raise RuntimeError(f"Unsupported host: {sys.platform}")
-
-    @classmethod
-    def from_tag(cls, tag: str) -> Host:
-        if tag == "darwin-x86_64":
-            return Host.Darwin
-        if tag == "linux-x86_64":
-            return Host.Linux
-        if tag == "windows-x86_64":
-            return Host.Windows64
-        raise ValueError(f"Unrecognized host tag: {tag}")
+ALL_HOSTS = list(Host)
 
 
-def get_host_tag() -> str:
-    """Returns the host tag used for testing on the current host."""
-    # mypy prunes unreachable code fairly aggressively with sys.platform, so if this
-    # doesn't use elif mypy will actually complain that the checks after the check for
-    # the OS doing the linting are unreachable. That actually does mean that mypy isn't
-    # checking most of this function because it quits looking after the first condition.
-    # https://github.com/python/mypy/issues/5678
-    if sys.platform.startswith("linux"):  # pylint: disable=no-else-return
-        return "linux-x86_64"
-    elif sys.platform == "darwin":
-        return "darwin-x86_64"
-    elif sys.platform == "win32":
-        return "windows-x86_64"
-    raise ValueError("Unknown host: {}".format(sys.platform))
+def get_host_tag(ndk_path: str) -> str:
+    """Returns the host tag used for testing on the current host.
+
+    For Windows, the result depends on the NDK in question since a 64-bit host
+    may be used to test the 32-bit NDK.
+    """
+    if sys.platform.startswith('linux'):
+        return 'linux-x86_64'
+    elif sys.platform == 'darwin':
+        return 'darwin-x86_64'
+    elif sys.platform == 'win32':
+        return 'windows-x86_64'
+    raise ValueError('Unknown host: {}'.format(sys.platform))
 
 
 def host_to_tag(host: Host) -> str:
@@ -118,12 +67,19 @@
     # TODO: Clean up since this can all be + -x86_64 once we rename the windows
     # value.
     if not host.is_windows:
-        return host.value + "-x86_64"
-    if host == Host.Windows64:
-        return "windows-x86_64"
+        return host.value + '-x86_64'
+    elif host == Host.Windows64:
+        return 'windows-x86_64'
     raise NotImplementedError
 
 
 def get_default_host() -> Host:
     """Returns the Host matching the current machine."""
-    return Host.current()
+    if sys.platform in ('linux', 'linux2'):
+        return Host.Linux
+    elif sys.platform == 'darwin':
+        return Host.Darwin
+    elif sys.platform == 'win32':
+        return Host.Windows64
+    else:
+        raise RuntimeError(f'Unsupported host: {sys.platform}')
diff --git a/ndk/ndkbuild.py b/ndk/ndkbuild.py
index 3e477e2..a4fcc29 100644
--- a/ndk/ndkbuild.py
+++ b/ndk/ndkbuild.py
@@ -17,25 +17,15 @@
 from __future__ import absolute_import
 
 import os
-import subprocess
-from pathlib import Path
-from subprocess import CompletedProcess
+from typing import List, Tuple
+
+import ndk.ext.subprocess
 
 
-def make_build_command(ndk_path: Path, build_flags: list[str]) -> list[str]:
-    ndk_build_path = ndk_path / "ndk-build"
-    cmd = [str(ndk_build_path)] + build_flags
-    if os.name == "nt":
-        cmd = ["cmd", "/c"] + cmd
-    return cmd
-
-
-def build(ndk_path: Path, build_flags: list[str]) -> CompletedProcess[str]:
+def build(ndk_path: str, build_flags: List[str]) -> Tuple[int, str]:
     """Invokes ndk-build with the given arguments."""
-    return subprocess.run(
-        make_build_command(ndk_path, build_flags),
-        check=False,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
+    ndk_build_path = os.path.join(ndk_path, 'ndk-build')
+    cmd = [ndk_build_path] + build_flags
+    if os.name == 'nt':
+        cmd = ['cmd', '/c'] + cmd
+    return ndk.ext.subprocess.call_output(cmd, encoding='utf-8')
diff --git a/ndk/ndkversionheadergenerator.py b/ndk/ndkversionheadergenerator.py
deleted file mode 100644
index 487f16d..0000000
--- a/ndk/ndkversionheadergenerator.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import textwrap
-from pathlib import Path
-
-
-class NdkVersionHeaderGenerator:
-    def __init__(
-        self, major: int, minor: int, beta: int, build_number: int, canary: bool
-    ) -> None:
-        self.major = major
-        self.minor = minor
-        self.beta = beta
-        self.build_number = build_number
-        self.canary = canary
-
-    def generate_str(self) -> str:
-        canary = 1 if self.canary else 0
-        return textwrap.dedent(
-            f"""\
-            #pragma once
-
-            /**
-             * Set to 1 if this is an NDK, unset otherwise. See
-             * https://android.googlesource.com/platform/bionic/+/master/docs/defines.md.
-             */
-            #define __ANDROID_NDK__ 1
-
-            /**
-             * Major version of this NDK.
-             *
-             * For example: 16 for r16.
-             */
-            #define __NDK_MAJOR__ {self.major}
-
-            /**
-             * Minor version of this NDK.
-             *
-             * For example: 0 for r16 and 1 for r16b.
-             */
-            #define __NDK_MINOR__ {self.minor}
-
-            /**
-             * Set to 0 if this is a release build, or 1 for beta 1,
-             * 2 for beta 2, and so on.
-             */
-            #define __NDK_BETA__ {self.beta}
-
-            /**
-             * Build number for this NDK.
-             *
-             * For a local development build of the NDK, this is 0.
-             */
-            #define __NDK_BUILD__ {self.build_number}
-
-            /**
-             * Set to 1 if this is a canary build, 0 if not.
-             */
-            #define __NDK_CANARY__ {canary}
-            """
-        )
-
-    def write(self, output: Path) -> None:
-        output.write_text(self.generate_str())
diff --git a/ndk/notify.py b/ndk/notify.py
index bbc2fe4..fbaa80b 100644
--- a/ndk/notify.py
+++ b/ndk/notify.py
@@ -32,7 +32,7 @@
         body: Optional additional text to display.
     """
     try:
-        args = ["notify-send", subject]
+        args = ['notify-send', subject]
         if body is not None:
             args.append(body)
         subprocess.call(args)
diff --git a/ndk/packaging.py b/ndk/packaging.py
new file mode 100644
index 0000000..8ff653d
--- /dev/null
+++ b/ndk/packaging.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""NDK packaging APIs."""
+from __future__ import absolute_import
+
+import os
+import shutil
+import subprocess
+import tempfile
+from typing import Iterable, List, Optional, Set, Tuple
+
+import ndk.abis
+from ndk.hosts import Host, host_to_tag
+
+
+PACKAGE_VARIANTS = (
+    'abi',
+    'arch',
+    'host',
+    'toolchain',
+    'triple',
+)
+
+
+def expand_paths(package: str, host: Host,
+                 arches: Optional[Iterable[ndk.abis.Arch]]) -> List[str]:
+    """Expands package definition tuple into list of full package names.
+
+    >>> expand_paths('gcc-{toolchain}-{host}', Host.Linux, ['arm', 'x86_64'])
+    ['gcc-arm-linux-androideabi-linux-x86_64', 'gcc-x86_64-linux-x86_64']
+
+    >>> expand_paths('gdbserver-{arch}', Host.Linux, ['arm64', 'x86_64'])
+    ['gdbserver-arm64', 'gdbserver-x86_64']
+
+    >>> expand_paths('llvm-{host}', Host.Linux, None)
+    ['llvm-linux-x86_64']
+
+    >>> expand_paths('platforms', Host.Linux, ['arm'])
+    ['platforms']
+
+    >>> expand_paths('libc++-{abi}', Host.Linux, ['arm'])
+    ['libc++-armeabi-v7a']
+
+    >>> expand_paths('binutils/{triple}', Host.Linux, ['arm', 'x86_64'])
+    ['binutils/arm-linux-androideabi', 'binutils/x86_64-linux-android']
+
+    >> expand_paths('toolchains/{toolchain}-4.9', Host.Linux, ['arm', 'x86'])
+    ['toolchains/arm-linux-androideabi-4.9', 'toolchains/x86-4.9']
+    """
+    host_tag = host_to_tag(host)
+    if arches is None:
+        return [package.format(host=host_tag)]
+
+    seen_packages: Set[str] = set()
+    packages = []
+    for arch in arches:
+        triple = ndk.abis.arch_to_triple(arch)
+        toolchain = ndk.abis.arch_to_toolchain(arch)
+        for abi in ndk.abis.arch_to_abis(arch):
+            expanded = package.format(
+                abi=abi, arch=arch, host=host_tag, triple=triple,
+                toolchain=toolchain)
+            if expanded not in seen_packages:
+                packages.append(expanded)
+            seen_packages.add(expanded)
+    return packages
+
+
+def package_varies_by(install_path: str, variant: str) -> bool:
+    """Determines if a package varies by a given input.
+
+    >>> package_varies_by('foo-{host}', 'host')
+    True
+
+    >>> package_varies_by('foo', 'host')
+    False
+
+    >>> package_varies_by('foo-{arch}', 'host')
+    False
+    """
+
+    if variant not in PACKAGE_VARIANTS:
+        raise ValueError
+
+    variant_replacement_str = '{' + variant + '}'
+    return variant_replacement_str in install_path
+
+
+def expand_packages(package: str, install_path: str, host: Host,
+                    arches: List[ndk.abis.Arch]) -> Iterable[Tuple[str, str]]:
+    """Returns a list of tuples of `(package, install_path)`."""
+    package_template = package
+    for variant in PACKAGE_VARIANTS:
+        if package_varies_by(install_path, variant):
+            package_template += '-{' + variant + '}'
+
+    expanded_packages = expand_paths(package_template, host, arches)
+    expanded_installs = expand_paths(install_path, host, arches)
+    return zip(expanded_packages, expanded_installs)
+
+
+def extract_zip(package_path: str, install_path: str) -> None:
+    """Extracts the contents of a zipfile to a directory.
+
+    This behaves similar to the following shell commands (using tar instead of
+    zip because `unzip` doesn't support `--strip-components`):
+
+        mkdir -p $install_path
+        tar xf $package_path -C $install_path --strip-components=1
+
+    That is, the first directory in the package is stripped and the contents
+    are placed in the install path.
+
+    Args:
+        package_path: Path to the zip file to extract.
+        install_path: Directory in which to extract zip contents.
+
+    Raises:
+        RuntimeError: The zip file was not in the allowed format. i.e. the zip
+                      had more than one top level directory or was empty.
+    """
+    package_name = os.path.basename(package_path)
+    extract_dir = tempfile.mkdtemp()
+    try:
+        subprocess.check_call(
+            ['unzip', '-q', package_path, '-d', extract_dir])
+        dirs = os.listdir(extract_dir)
+        if len(dirs) > 1:
+            msg = 'Package has more than one root directory: ' + package_name
+            raise RuntimeError(msg)
+        if not dirs:
+            raise RuntimeError('Package was empty: ' + package_name)
+        parent_dir = os.path.dirname(install_path)
+        if not os.path.exists(parent_dir):
+            os.makedirs(parent_dir)
+        shutil.move(os.path.join(extract_dir, dirs[0]), install_path)
+    finally:
+        shutil.rmtree(extract_dir)
diff --git a/ndk/paths.py b/ndk/paths.py
index 5cb78a2..01d9bf4 100644
--- a/ndk/paths.py
+++ b/ndk/paths.py
@@ -14,48 +14,51 @@
 # limitations under the License.
 #
 """Helper functions for NDK build and test paths."""
-import os
-from pathlib import Path, PurePosixPath
-from typing import Callable, Iterator, Optional
+from __future__ import absolute_import
 
+import os
+from pathlib import Path
+import sys
+from typing import Optional
+
+import ndk.abis
 import ndk.config
 import ndk.hosts
 
+
 ANDROID_DIR = Path(__file__).resolve().parents[2]
-NDK_DIR = ANDROID_DIR / "ndk"
-PREBUILT_SYSROOT = ANDROID_DIR / "prebuilts/ndk/platform/sysroot"
-DEVICE_TEST_BASE_DIR = PurePosixPath("/data/local/tmp/tests")
+NDK_DIR = ANDROID_DIR / 'ndk'
 
 
-def android_path(*args: str) -> Path:
+def android_path(*args: str) -> str:
     """Returns the absolute path rooted within the top level source tree."""
-    return ANDROID_DIR.joinpath(*args)
+    return str(ANDROID_DIR.joinpath(*args))
 
 
-def ndk_path(*args: str) -> Path:
+def ndk_path(*args: str) -> str:
     """Returns the absolute path rooted within the NDK source tree."""
-    return android_path("ndk", *args)
+    return android_path('ndk', *args)
 
 
-def toolchain_path(*args: str) -> Path:
+def sysroot_path(toolchain: ndk.abis.Toolchain) -> str:
+    """Returns the path to the prebuilt sysroot for the given toolchain."""
+    arch = ndk.abis.toolchain_to_arch(toolchain)
+    # Only ARM has more than one ABI, and they both have the same minimum
+    # platform level.
+    abi = ndk.abis.arch_to_abis(arch)[0]
+    version = ndk.abis.min_api_for_abi(abi)
+
+    prebuilt_ndk = 'prebuilts/ndk/current'
+    sysroot_subpath = 'platforms/android-{}/arch-{}'.format(version, arch)
+    return android_path(prebuilt_ndk, sysroot_subpath)
+
+
+def toolchain_path(*args: str) -> str:
     """Returns a path within the toolchain subdirectory."""
-    return android_path("toolchain", *args)
+    return android_path('toolchain', *args)
 
 
-def expand_path(path: Path, host: ndk.hosts.Host) -> Path:
-    """Expands package definition tuple into a package name.
-
-    >>> expand_path('llvm-{host}', Host.Linux)
-    'llvm-linux-x86_64'
-
-    >>> expand_path('platforms', Host.Linux)
-    'platforms'
-    """
-    host_tag = ndk.hosts.host_to_tag(host)
-    return Path(str(path).format(host=host_tag))
-
-
-def _get_dir_from_env(default: Path, env_var: str) -> Path:
+def _get_dir_from_env(default: str, env_var: str) -> str:
     """Returns the path to a directory specified by the environment.
 
     If the environment variable is not set, the default will be used. The
@@ -68,29 +71,27 @@
     Returns:
         The absolute path to the directory.
     """
-    path = Path(os.getenv(env_var, default))
-    if not path.is_dir():
-        path.mkdir(parents=True)
-    return path.resolve()
+    path = os.path.realpath(os.getenv(env_var, default))
+    if not os.path.isdir(path):
+        os.makedirs(path)
+    return path
 
 
-def get_out_dir() -> Path:
+def get_out_dir() -> str:
     """Returns the out directory."""
-    return _get_dir_from_env(android_path("out"), "OUT_DIR")
+    return _get_dir_from_env(android_path('out'), 'OUT_DIR')
 
 
-def get_dist_dir(out_dir: Optional[Path] = None) -> Path:
+def get_dist_dir(out_dir: str) -> str:
     """Returns the distribution directory.
 
     The contents of the distribution directory are archived on the build
     servers. Suitable for build logs and final artifacts.
     """
-    if out_dir is None:
-        out_dir = get_out_dir()
-    return _get_dir_from_env(out_dir / "dist", "DIST_DIR")
+    return _get_dir_from_env(os.path.join(out_dir, 'dist'), 'DIST_DIR')
 
 
-def path_in_out(dirname: Path, out_dir: Optional[Path] = None) -> Path:
+def path_in_out(dirname: str, out_dir: Optional[str] = None) -> str:
     """Returns a path within the out directory."
 
     Args:
@@ -104,12 +105,11 @@
     """
     if out_dir is None:
         out_dir = get_out_dir()
-    return out_dir / dirname
+    return os.path.join(out_dir, dirname)
 
 
-def get_install_path(
-    out_dir: Optional[Path] = None, host: Optional[ndk.hosts.Host] = None
-) -> Path:
+def get_install_path(out_dir: Optional[str] = None,
+                     host: Optional[ndk.hosts.Host] = None) -> str:
     """Returns the built NDK install path.
 
     Note that the path returned might not actually contain the NDK. The NDK may
@@ -129,41 +129,13 @@
     """
     if host is None:
         host = ndk.hosts.get_default_host()
-    release_name = f"android-ndk-{ndk.config.release}"
-    return path_in_out(Path(host.value) / release_name, out_dir)
+    release_name = f'android-ndk-{ndk.config.release}'
+    return path_in_out(os.path.join(host.value, release_name), out_dir)
 
 
-def walk(
-    path: Path,
-    top_down: bool = True,
-    on_error: Optional[Callable[[OSError], None]] = None,
-    follow_links: bool = False,
-    directories: bool = True,
-) -> Iterator[Path]:
-    """Recursively iterates through files in a directory.
-
-    This is a pathlib equivalent of os.walk, which Python inexplicably still
-    does not have in the standard library.
-
-    Args:
-        path: Directory tree to walk.
-        top_down: If True, walk the tree top-down. If False, walk the tree
-                  bottom-up.
-        on_error: An error handling callback for any OSError raised by the
-                  walk.
-        follow_links: If True, walk into symbolic links that resolve to
-                      directories.
-        directories: If True, the walk will also yield directories.
-    Yields:
-        A Path for each file (and optionally each directory) in the same manner
-        as os.walk.
-    """
-    for root, dirs, files in os.walk(
-        str(path), topdown=top_down, onerror=on_error, followlinks=follow_links
-    ):
-        root_path = Path(root)
-        if directories:
-            for dir_name in dirs:
-                yield root_path / dir_name
-        for file_name in files:
-            yield root_path / file_name
+def to_posix_path(path: str) -> str:
+    """Replaces backslashes with forward slashes on Windows."""
+    if sys.platform == 'win32':
+        return path.replace('\\', '/')
+    else:
+        return path
diff --git a/ndk/platforms.py b/ndk/platforms.py
deleted file mode 100644
index 75d161d..0000000
--- a/ndk/platforms.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Wrapper around meta/platforms.json."""
-import json
-
-from .paths import NDK_DIR
-
-
-def _load_data() -> tuple[int, int, dict[str, int]]:
-    """Loads and returns the min and max supported versions."""
-    with (NDK_DIR / "meta/platforms.json").open() as platforms:
-        data = json.load(platforms)
-    return data["min"], data["max"], data["aliases"]
-
-
-MIN_API_LEVEL, MAX_API_LEVEL, API_LEVEL_ALIASES = _load_data()
-ALL_API_LEVELS = list(range(MIN_API_LEVEL, MAX_API_LEVEL + 1))
-FIRST_LP64_API_LEVEL = 21
-FIRST_RISCV64_API_LEVEL = 35
diff --git a/ndk/pythonenv.py b/ndk/pythonenv.py
deleted file mode 100644
index ebf92bf..0000000
--- a/ndk/pythonenv.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Tools for verifying and fixing our Python environment."""
-import shutil
-import site
-import sys
-import textwrap
-
-PYTHON_DOCS = "https://android.googlesource.com/platform/ndk/+/master/docs/Building.md#python-environment-setup"
-
-
-def ensure_poetry_if_available() -> None:
-    if shutil.which("poetry") is None:
-        return
-    if "pypoetry" not in sys.executable:
-        sys.exit(
-            textwrap.fill(
-                f"Poetry is installed but {sys.executable} does not appear to be a "
-                f"Poetry environment. Follow {PYTHON_DOCS} to set up your Python "
-                "environment. If you have already configured your environment, "
-                "remember to run `poetry shell` to start a shell with the correct "
-                "environment, or prefix NDK commands with `poetry run`.",
-                break_long_words=False,
-                break_on_hyphens=False,
-            )
-        )
-
-
-def purge_user_site_packages() -> None:
-    if site.ENABLE_USER_SITE:
-        sys.path = [p for p in sys.path if p != site.getusersitepackages()]
-
-
-def ensure_python_environment() -> None:
-    """Verifies that the current Python environment is what we expect."""
-    ensure_poetry_if_available()
-    purge_user_site_packages()
diff --git a/ndk/run_tests.py b/ndk/run_tests.py
index 9eb27f3..69423b4 100755
--- a/ndk/run_tests.py
+++ b/ndk/run_tests.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2017 The Android Open Source Project
 #
@@ -15,60 +15,68 @@
 # limitations under the License.
 #
 """Runs the tests built by make_tests.py."""
-from __future__ import absolute_import, print_function
+from __future__ import absolute_import
+from __future__ import print_function
 
 import argparse
 import collections
 import datetime
+import json
 import logging
+import os
+import posixpath
 import random
-import shutil
 import site
 import subprocess
 import sys
 import time
-from collections.abc import Iterator
-from contextlib import contextmanager
-from pathlib import Path, PurePosixPath
-from typing import Dict, Iterable, List, Mapping, Optional
+import traceback
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Mapping,
+    Optional,
+    Sequence,
+    Tuple,
+    Union,
+)
 
+from ndk.abis import Abi
 import ndk.ansi
-import ndk.archive
 import ndk.ext.subprocess
 import ndk.notify
 import ndk.paths
 import ndk.test.builder
-import ndk.test.buildtest.case
-import ndk.test.ui
-import ndk.ui
+from ndk.test.config import DeviceTestConfig, LibcxxTestConfig
 from ndk.test.devices import (
     Device,
-    DeviceConfig,
     DeviceFleet,
     DeviceShardingGroup,
     find_devices,
 )
-from ndk.test.devicetest.case import TestCase
-from ndk.test.devicetest.scanner import ConfigFilter, enumerate_tests
 from ndk.test.filters import TestFilter
 from ndk.test.printers import Printer, StdoutPrinter
 from ndk.test.report import Report
 from ndk.test.result import (
     ExpectedFailure,
     Failure,
-    ResultTranslations,
     Skipped,
     Success,
     TestResult,
     UnexpectedSuccess,
 )
-from ndk.test.spec import BuildConfiguration, TestSpec
+from ndk.test.spec import BuildConfiguration
+import ndk.test.types
+import ndk.test.ui
 from ndk.timer import Timer
+import ndk.ui
 from ndk.workqueue import ShardingWorkQueue, Worker, WorkQueue
 
-from .pythonenv import ensure_python_environment
 
-AdbResult = tuple[int, str, str, str]
+DEVICE_TEST_BASE_DIR = '/data/local/tmp/tests'
 
 
 def logger() -> logging.Logger:
@@ -76,10 +84,149 @@
     return logging.getLogger(__name__)
 
 
+def shell_nocheck_wrap_errors(device: Device,
+                              cmd: Sequence[str]) -> Tuple[int, str, str]:
+    """Invokes device.shell_nocheck and wraps exceptions as failed commands."""
+    try:
+        return device.shell_nocheck(cmd)
+    except RuntimeError:
+        return 1, traceback.format_exc(), ''
+
+
+# TODO: Extract a common interface from this and ndk.test.types.Test for the
+# printer.
+class TestCase:
+    """A test case found in the dist directory.
+
+    The test directory is structured as tests/dist/$CONFIG/$BUILD_SYSTEM/...
+    What follows depends on the type of test case. Each discovered test case
+    will have a name, a build configuration, a build system, and a device
+    directory.
+    """
+
+    def __init__(self, name: str, test_src_dir: str,
+                 config: BuildConfiguration, build_system: str,
+                 device_dir: str) -> None:
+        self.name = name
+        self.test_src_dir = test_src_dir
+        self.config = config
+        self.build_system = build_system
+        self.device_dir = device_dir
+
+    def check_unsupported(self, device: Device) -> Optional[str]:
+        raise NotImplementedError
+
+    def check_broken(
+            self, device: Device) -> Union[Tuple[None, None], Tuple[str, str]]:
+        raise NotImplementedError
+
+    def run(self, device: Device) -> Tuple[int, str, str]:
+        raise NotImplementedError
+
+
+class BasicTestCase(TestCase):
+    """A test case for the standard NDK test builder.
+
+    These tests were written specifically for the NDK and thus follow the
+    layout we expect. In each test configuration directory, we have
+    $TEST_SUITE/$ABI/$TEST_FILES. $TEST_FILES includes both the shared
+    libraries for the test and the test executables.
+    """
+
+    def __init__(self, suite: str, executable: str, test_src_dir: str,
+                 config: BuildConfiguration, build_system: str,
+                 device_dir: str) -> None:
+        name = '.'.join([suite, executable])
+        super().__init__(name, test_src_dir, config, build_system, device_dir)
+
+        self.suite = suite
+        self.executable = executable
+
+    def get_test_config(self) -> DeviceTestConfig:
+        # We don't run anything in tests/build, and the libc++ tests are
+        # handled by a different LibcxxTest. We can safely assume that anything
+        # here is in tests/device.
+        test_dir = os.path.join(self.test_src_dir, 'device', self.suite)
+        return DeviceTestConfig.from_test_dir(test_dir)
+
+    def check_unsupported(self, device: Device) -> Optional[str]:
+        return self.get_test_config().run_unsupported(self, device)
+
+    def check_broken(
+            self, device: Device) -> Union[Tuple[None, None], Tuple[str, str]]:
+        return self.get_test_config().run_broken(self, device)
+
+    def run(self, device: Device) -> Tuple[int, str, str]:
+        cmd = 'cd {} && LD_LIBRARY_PATH={} ./{} 2>&1'.format(
+            self.device_dir, self.device_dir, self.executable)
+        logger().info('%s: shell_nocheck "%s"', device.name, cmd)
+        return shell_nocheck_wrap_errors(device, [cmd])
+
+
+class LibcxxTestCase(TestCase):
+    """A libc++ test case built by LIT.
+
+    LIT's test structure doesn't map cleanly to ours; they have a hierarchical
+    test structure. The top level contains a single "libc++" directory. In that
+    directory is where shared libraries common to all tests are placed. That
+    directory and any under it may contain test executables (always suffixed
+    with ".exe") or test data (always suffixed with ".dat").
+    """
+
+    def __init__(self, suite: str, executable: str, test_src_dir: str,
+                 config: BuildConfiguration, device_dir: str) -> None:
+        # Tests in the top level don't need any mangling to match the filters.
+        if suite == 'libc++':
+            filter_name = executable
+        else:
+            filter_name = os.path.join(suite[len('libc++/'):], executable)
+
+        # The executable name ends with .exe. Remove that so it matches the
+        # filter that would be used to build the test.
+        name = '.'.join(['libc++', filter_name[:-4]])
+        super(LibcxxTestCase, self).__init__(
+            name, test_src_dir, config, 'libc++', device_dir)
+
+        self.suite = suite
+        self.executable = executable
+
+    @property
+    def case_name(self) -> str:
+        # Executable is foo.pass.cpp.exe, we want foo.pass.
+        return os.path.splitext(os.path.splitext(self.executable)[0])[0]
+
+    def get_test_config(self) -> DeviceTestConfig:
+        _, _, test_subdir = self.suite.partition('/')
+        test_dir = os.path.join(self.test_src_dir, 'libc++/test', test_subdir)
+        return LibcxxTestConfig.from_test_dir(test_dir)
+
+    def check_unsupported(self, device: Device) -> Optional[str]:
+        config = self.get_test_config().run_unsupported(self, device)
+        if config is not None:
+            return config
+        return None
+
+    def check_broken(
+            self, device: Device) -> Union[Tuple[None, None], Tuple[str, str]]:
+        config, bug = self.get_test_config().run_broken(self, device)
+        if config is not None:
+            return config, bug
+        return None, None
+
+    def run(self, device: Device) -> Tuple[int, str, str]:
+        libcxx_so_dir = posixpath.join(
+            DEVICE_TEST_BASE_DIR, str(self.config), 'libcxx/libc++')
+        cmd = 'cd {} && LD_LIBRARY_PATH={} ./{} 2>&1'.format(
+            self.device_dir, libcxx_so_dir, self.executable)
+        logger().info('%s: shell_nocheck "%s"', device.name, cmd)
+        return shell_nocheck_wrap_errors(device, [cmd])
+
+
 class TestRun:
     """A test case mapped to the device group it will run on."""
 
-    def __init__(self, test_case: TestCase, device_group: DeviceShardingGroup) -> None:
+    def __init__(self, test_case: TestCase,
+                 device_group: DeviceShardingGroup) -> None:
         self.test_case = test_case
         self.device_group = device_group
 
@@ -95,41 +242,171 @@
     def config(self) -> BuildConfiguration:
         return self.test_case.config
 
-    def make_result(self, adb_result: AdbResult, device: Device) -> TestResult:
-        status, out, _, cmd = adb_result
+    def make_result(self, adb_result_tuple: Tuple[int, str, str],
+                    device: Device) -> TestResult:
+        status, out, _ = adb_result_tuple
         result: TestResult
         if status == 0:
             result = Success(self)
         else:
-            out = "\n".join([str(device), out])
-            result = Failure(self, out, cmd, self.device_group)
+            out = '\n'.join([str(device), out])
+            result = Failure(self, out)
         return self.fixup_xfail(result, device)
 
     def fixup_xfail(self, result: TestResult, device: Device) -> TestResult:
-        config, bug = self.test_case.check_broken(device.config())
+        config, bug = self.test_case.check_broken(device)
         if config is not None:
             assert bug is not None
             if result.failed():
-                assert isinstance(result, Failure)
-                return ExpectedFailure(self, result.message, config, bug)
-            if result.passed():
+                return ExpectedFailure(self, config, bug)
+            elif result.passed():
                 return UnexpectedSuccess(self, config, bug)
-            raise ValueError("Test result must have either failed or passed.")
+            raise ValueError('Test result must have either failed or passed.')
         return result
 
     def run(self, device: Device) -> TestResult:
-        config = self.test_case.check_unsupported(device.config())
+        config = self.test_case.check_unsupported(device)
         if config is not None:
-            return Skipped(self, f"test unsupported for {config}")
+            return Skipped(self, f'test unsupported for {config}')
         return self.make_result(self.test_case.run(device), device)
 
-    def __str__(self) -> str:
-        return f"{self.name} [{self.config} running on API {self.device_group.version}]"
+
+def build_tests(test_src_dir: str, ndk_dir: str, out_dir: str, clean: bool,
+                printer: Printer, config: Dict[Any, Any],
+                test_filter: str) -> Report:
+    """Builds the NDK tests and returns the build report.
+
+    Args:
+        test_src_dir: Directory containing the test sources.
+        ndk_dir: Path to the NDK under test.
+        out_dir: Output directory for built tests.
+        clean: True if the output directory should be cleaned before building.
+        printer: Printer used to report build progress and results.
+        config: Test configuration dict (see get_config_dict).
+        test_filter: Filter string limiting which tests are built.
+    """
+    test_options = ndk.test.spec.TestOptions(
+        test_src_dir, ndk_dir, out_dir, test_filter=test_filter, clean=clean)
+
+    test_spec = ndk.test.builder.test_spec_from_config(config)
+    builder = ndk.test.builder.TestBuilder(test_spec, test_options, printer)
+
+    return builder.build()
+
+
+def enumerate_basic_tests(out_dir_base: str, test_src_dir: str,
+                          build_cfg: BuildConfiguration, build_system: str,
+                          test_filter: TestFilter) -> List[TestCase]:
+    """Finds built ndk-build/cmake style tests for one build configuration.
+
+    Scans out_dir_base/<build_cfg>/<build_system>/<test>/<abi> and returns a
+    BasicTestCase for each test file that passes test_filter. Returns an
+    empty list if the directory does not exist (nothing was built).
+    """
+    tests: List[TestCase] = []
+    tests_dir = os.path.join(out_dir_base, str(build_cfg), build_system)
+    if not os.path.exists(tests_dir):
+        return tests
+
+    for test_subdir in os.listdir(tests_dir):
+        test_dir = os.path.join(tests_dir, test_subdir)
+        out_dir = os.path.join(test_dir, build_cfg.abi)
+        test_relpath = os.path.relpath(out_dir, out_dir_base)
+        # Tests are pushed to the device mirroring their layout on the host.
+        device_dir = posixpath.join(
+            DEVICE_TEST_BASE_DIR, ndk.paths.to_posix_path(test_relpath))
+        for test_file in os.listdir(out_dir):
+            # Shared libraries and shell scripts live next to the test
+            # binaries but are not themselves test executables.
+            if test_file.endswith('.so'):
+                continue
+            if test_file.endswith('.sh'):
+                continue
+            name = '.'.join([test_subdir, test_file])
+            if not test_filter.filter(name):
+                continue
+            tests.append(BasicTestCase(
+                test_subdir, test_file, test_src_dir, build_cfg, build_system,
+                device_dir))
+    return tests
+
+
+def enumerate_libcxx_tests(out_dir_base: str, test_src_dir: str,
+                           build_cfg: BuildConfiguration, build_system: str,
+                           test_filter: TestFilter) -> List[TestCase]:
+    """Finds built libc++ tests (*.exe) for one build configuration.
+
+    Walks out_dir_base/<build_cfg>/<build_system> for files ending in .exe
+    and returns a LibcxxTestCase for each test whose mangled name (see the
+    comments below) passes test_filter. Returns an empty list if the
+    directory does not exist (nothing was built).
+    """
+    tests: List[TestCase] = []
+    tests_dir = os.path.join(out_dir_base, str(build_cfg), build_system)
+    if not os.path.exists(tests_dir):
+        return tests
+
+    for root, _, files in os.walk(tests_dir):
+        for test_file in files:
+            if not test_file.endswith('.exe'):
+                continue
+            test_relpath = os.path.relpath(root, out_dir_base)
+            # Tests are pushed to the device mirroring their host layout.
+            device_dir = posixpath.join(
+                DEVICE_TEST_BASE_DIR, ndk.paths.to_posix_path(test_relpath))
+            suite_name = ndk.paths.to_posix_path(
+                os.path.relpath(root, tests_dir))
+
+            # Our file has a .exe extension, but the name should match the
+            # source file for the filters to work.
+            test_name = test_file[:-4]
+
+            # Tests in the top level don't need any mangling to match the
+            # filters.
+            if suite_name != 'libc++':
+                if not suite_name.startswith('libc++/'):
+                    raise ValueError(suite_name)
+                # According to the test runner, these are all part of the
+                # "libc++" test, and the rest of the data is the subtest name.
+                # i.e.  libc++/foo/bar/baz.cpp.exe is actually
+                # libc++.foo/bar/baz.cpp.  Matching this expectation here
+                # allows us to use the same filter string for running the tests
+                # as for building the tests.
+                test_path = suite_name[len('libc++/'):]
+                test_name = '/'.join([test_path, test_name])
+
+            filter_name = '.'.join(['libc++', test_name])
+            if not test_filter.filter(filter_name):
+                continue
+            tests.append(LibcxxTestCase(
+                suite_name, test_file, test_src_dir, build_cfg, device_dir))
+    return tests
+
+
+class ConfigFilter:
+    """Filters build configurations against the ABIs/linkers of a test spec."""
+
+    def __init__(self, test_config: Dict[Any, Any]) -> None:
+        """Creates the filter from a test config dict (see get_config_dict)."""
+        test_spec = ndk.test.builder.test_spec_from_config(test_config)
+        self.spec = test_spec
+
+    def filter(self, build_config: BuildConfiguration) -> bool:
+        """Returns True if build_config's ABI and linker are in the spec."""
+        return (build_config.abi in self.spec.abis
+                and build_config.linker in self.spec.linkers)
+
+
+def enumerate_tests(test_dir: str, test_src_dir: str, test_filter: TestFilter,
+                    config_filter: ConfigFilter
+                    ) -> Dict[BuildConfiguration, List[TestCase]]:
+    """Enumerates all runnable tests found in test_dir.
+
+    Returns a dict mapping each build configuration that passes config_filter
+    to the (possibly empty) list of test cases discovered for it.
+    """
+    tests: Dict[BuildConfiguration, List[TestCase]] = {}
+
+    # The tests directory has a directory for each type of test. For example:
+    #
+    #  * build.sh
+    #  * cmake
+    #  * libcxx
+    #  * ndk-build
+    #  * test.py
+    #
+    # We need to handle some of these differently. The test.py and build.sh
+    # type tests are build only, so we don't need to run them. The libc++ tests
+    # are built by a test runner we don't control, so its output doesn't quite
+    # match what we expect.
+    test_subdir_class_map: Dict[str, Callable[
+        [str, str, BuildConfiguration, str, TestFilter], List[TestCase]]] = {
+            'cmake': enumerate_basic_tests,
+            'libcxx': enumerate_libcxx_tests,
+            'ndk-build': enumerate_basic_tests,
+        }
+
+    # Each subdirectory of test_dir is named for the build configuration
+    # whose tests it holds.
+    for build_cfg_str in os.listdir(test_dir):
+        build_cfg = BuildConfiguration.from_string(build_cfg_str)
+        if not config_filter.filter(build_cfg):
+            continue
+
+        if build_cfg not in tests:
+            tests[build_cfg] = []
+
+        for test_type, scan_for_tests in test_subdir_class_map.items():
+            tests[build_cfg].extend(scan_for_tests(
+                test_dir, test_src_dir, build_cfg, test_type, test_filter))
+
+    return tests
 
 
 def clear_test_directory(_worker: Worker, device: Device) -> None:
-    print(f"Clearing test directory on {device}")
-    cmd = ["rm", "-r", str(ndk.paths.DEVICE_TEST_BASE_DIR)]
+    print(f'Clearing test directory on {device}')
+    cmd = ['rm', '-r', DEVICE_TEST_BASE_DIR]
     logger().info('%s: shell_nocheck "%s"', device.name, cmd)
     device.shell_nocheck(cmd)
 
@@ -144,81 +421,88 @@
 
 
 def adb_has_feature(feature: str) -> bool:
-    cmd = ["adb", "host-features"]
-    logger().info('check_output "%s"', " ".join(cmd))
-    output = subprocess.check_output(cmd).decode("utf-8")
+    cmd = ['adb', 'host-features']
+    logger().info('check_output "%s"', ' '.join(cmd))
+    output = subprocess.check_output(cmd).decode('utf-8')
     features_line = output.splitlines()[-1]
-    features = features_line.split(",")
+    features = features_line.split(',')
     return feature in features
 
 
-def push_tests_to_device(
-    worker: Worker,
-    src_dir: Path,
-    dest_dir: PurePosixPath,
-    config: BuildConfiguration,
-    device: Device,
-    use_sync: bool,
-) -> None:
-    """Pushes a directory to the given device.
-
-    Creates the parent directory on the device if needed.
-
-    Args:
-        worker: The worker performing the task.
-        src_dir: The directory to push.
-        dest_dir: The destination directory on the device. Note that when
-                  pushing a directory, dest_dir will be the parent directory,
-                  not the destination path.
-        config: The build configuration for the tests being pushed.
-        device: The device to push to.
-        use_sync: True if `adb push --sync` is supported.
-    """
-    worker.status = f"Pushing {config} tests to {device}."
-    logger().info("%s: mkdir %s", device.name, dest_dir)
-    device.shell_nocheck(["mkdir", str(dest_dir)])
+def push_tests_to_device(worker: Worker, src_dir: str, dest_dir: str,
+                         config: BuildConfiguration, device: Device,
+                         use_sync: bool) -> None:
+    worker.status = f'Pushing {config} tests to {device}.'
+    logger().info('%s: mkdir %s', device.name, dest_dir)
+    device.shell_nocheck(['mkdir', dest_dir])
     logger().info(
-        "%s: push%s %s %s",
-        device.name,
-        " --sync" if use_sync else "",
-        src_dir,
-        dest_dir,
-    )
-    device.push(str(src_dir), str(dest_dir), sync=use_sync)
-    # Tests that were built and bundled on Windows but pushed from Linux or macOS will
-    # not have execute permission by default. Since we don't know where the tests came
-    # from, chmod all the tests regardless.
-    device.shell(["chmod", "-R", "777", str(dest_dir)])
+        '%s: push%s %s %s', device.name, ' --sync' if use_sync else '',
+        src_dir, dest_dir)
+    device.push(src_dir, dest_dir, sync=use_sync)
+    if sys.platform == 'win32':
+        device.shell(['chmod', '-R', '777', dest_dir])
+
+
+def finish_workqueue_with_ui(workqueue: WorkQueue) -> None:
+    """Blocks until the work queue is drained, drawing a progress UI.
+
+    Redraws the UI after each completed result and clears it when finished.
+    Terminal echo is disabled and the cursor hidden while the UI is drawn.
+    """
+    console = ndk.ansi.get_console()
+    ui = ndk.ui.get_work_queue_ui(console, workqueue)
+    with ndk.ansi.disable_terminal_echo(sys.stdin):
+        with console.cursor_hide_context():
+            ui.draw()
+            while not workqueue.finished():
+                # get_result blocks until a unit of work completes.
+                workqueue.get_result()
+                ui.draw()
+            ui.clear()
 
 
 def push_tests_to_devices(
-    workqueue: WorkQueue,
-    test_dir: Path,
-    groups_for_config: Mapping[BuildConfiguration, Iterable[DeviceShardingGroup]],
-    use_sync: bool,
-) -> None:
-    dest_dir = ndk.paths.DEVICE_TEST_BASE_DIR
+        workqueue: WorkQueue, test_dir: str,
+        groups_for_config: Mapping[BuildConfiguration,
+                                   Iterable[DeviceShardingGroup]],
+        use_sync: bool) -> None:
+    dest_dir = DEVICE_TEST_BASE_DIR
     for config, groups in groups_for_config.items():
-        src_dir = test_dir / str(config)
+        src_dir = os.path.join(test_dir, str(config))
         for group in groups:
             for device in group.devices:
                 workqueue.add_task(
-                    push_tests_to_device, src_dir, dest_dir, config, device, use_sync
-                )
+                    push_tests_to_device, src_dir, dest_dir, config, device,
+                    use_sync)
 
-    ndk.ui.finish_workqueue_with_ui(workqueue, ndk.ui.get_work_queue_ui)
-    print("Finished pushing tests")
+    finish_workqueue_with_ui(workqueue)
+    print('Finished pushing tests')
+
+
+def disable_verity_and_wait_for_reboot(device: Device) -> None:
+    """Disables dm-verity on the device, rebooting it if required.
+
+    No-op if verity is not currently enforcing. Restarts adbd as root first,
+    since disable-verity requires it. Raises RuntimeError if disable-verity
+    does not report success.
+    """
+    if device.get_prop('ro.boot.veritymode') != 'enforcing':
+        return
+
+    logger().info('%s: root', device.name)
+    device.root()
+
+    logger().info('%s: disable-verity', device.name)
+    cmd = ['adb', '-s', device.serial, 'disable-verity']
+    # disable-verity doesn't set exit status
+    _, out = ndk.ext.subprocess.call_output(cmd, encoding='utf-8')
+    logger().info('%s: disable-verity:\n%s', device, out)
+    # Success/failure must be determined by scraping adb's output.
+    if 'disabled on /' not in out:
+        raise RuntimeError(f'{device}: adb disable-verity failed:\n{out}')
+
+    # Some devices need a reboot for the change to take effect; adb says so
+    # in its output.
+    if 'reboot your device' in out:
+        logger().info('%s: reboot', device.name)
+        device.reboot()
+        logger().info('%s: wait-for-device', device.name)
+        device.wait()
 
 
 def run_test(worker: Worker, test: TestRun) -> TestResult:
     device = worker.data[0]
-    worker.status = f"Running {test.name}"
+    worker.status = f'Running {test.name}'
     return test.run(device)
 
 
 def print_test_stats(
-    test_groups: Mapping[BuildConfiguration, Iterable[TestCase]]
-) -> None:
+        test_groups: Mapping[BuildConfiguration, Iterable[TestCase]]) -> None:
     test_stats: Dict[BuildConfiguration, Dict[str, List[TestCase]]] = {}
     for config, tests in test_groups.items():
         test_stats[config] = {}
@@ -228,33 +512,33 @@
             test_stats[config][test.build_system].append(test)
 
     for config, build_system_groups in test_stats.items():
-        print(f"Config {config}:")
+        print(f'Config {config}:')
         for build_system, tests in build_system_groups.items():
-            print(f"\t{build_system}: {len(tests)} tests")
+            print(f'\t{build_system}: {len(tests)} tests')
 
 
 def verify_have_all_requested_devices(fleet: DeviceFleet) -> bool:
     missing_configs = fleet.get_missing()
     if missing_configs:
         logger().warning(
-            "Missing device configurations: %s",
-            ", ".join(str(c) for c in missing_configs),
-        )
+            'Missing device configurations: %s', ', '.join(missing_configs))
         return False
     return True
 
 
 def find_configs_with_no_device(
-    groups_for_config: Mapping[BuildConfiguration, Iterable[DeviceShardingGroup]]
+        groups_for_config: Mapping[BuildConfiguration,
+                                   Iterable[DeviceShardingGroup]]
 ) -> List[BuildConfiguration]:
     return [c for c, gs in groups_for_config.items() if not gs]
 
 
 def match_configs_to_device_groups(
-    fleet: DeviceFleet, configs: Iterable[BuildConfiguration]
+        fleet: DeviceFleet, configs: Iterable[BuildConfiguration]
 ) -> Dict[BuildConfiguration, List[DeviceShardingGroup]]:
     groups_for_config: Dict[BuildConfiguration, List[DeviceShardingGroup]] = {
-        config: [] for config in configs
+        config: []
+        for config in configs
     }
     for config in configs:
         for group in fleet.get_unique_device_groups():
@@ -268,10 +552,9 @@
 
 
 def pair_test_runs(
-    test_groups: Mapping[BuildConfiguration, Iterable[TestCase]],
-    groups_for_config: Mapping[BuildConfiguration, Iterable[DeviceShardingGroup]],
-    report: Report[DeviceShardingGroup],
-    fleet: DeviceFleet,
+        test_groups: Mapping[BuildConfiguration, Iterable[TestCase]],
+        groups_for_config: Mapping[BuildConfiguration,
+                                   Iterable[DeviceShardingGroup]]
 ) -> List[TestRun]:
     """Creates a TestRun object for each device/test case pairing."""
     test_runs = []
@@ -279,50 +562,27 @@
         if not test_cases:
             continue
 
-        report_skipped_tests_for_missing_devices(report, config, fleet, test_cases)
         for group in groups_for_config[config]:
             test_runs.extend([TestRun(tc, group) for tc in test_cases])
     return test_runs
 
 
-def report_skipped_tests_for_missing_devices(
-    report: Report[DeviceShardingGroup],
-    build_config: BuildConfiguration,
-    fleet: DeviceFleet,
-    test_cases: Iterable[TestCase],
-) -> None:
-    for group in fleet.get_missing():
-        device_config = DeviceConfig(group.abis, group.version, group.supports_mte)
-        if not device_config.can_run_build_config(build_config):
-            # These are a configuration that will never be valid, like a minSdkVersion
-            # 30 test on an API 21 device. No need to report these.
-            continue
-        for test_case in test_cases:
-            report.add_result(
-                test_case.build_system,
-                Skipped(TestRun(test_case, group), "No devices available"),
-            )
-
-
-def wait_for_results(
-    report: Report[DeviceShardingGroup],
-    workqueue: ShardingWorkQueue[TestResult, Device],
-    printer: Printer,
-) -> None:
+def wait_for_results(report: Report, workqueue: ShardingWorkQueue,
+                     printer: Printer) -> None:
     console = ndk.ansi.get_console()
     ui = ndk.test.ui.get_test_progress_ui(console, workqueue)
     with ndk.ansi.disable_terminal_echo(sys.stdin):
         with console.cursor_hide_context():
             while not workqueue.finished():
-                results = workqueue.get_results()
-                verbose = logger().isEnabledFor(logging.INFO)
-                if verbose or any(r.failed() for r in results):
+                result = workqueue.get_result()
+                suite = result.test.build_system
+                report.add_result(suite, result)
+                if logger().isEnabledFor(logging.INFO):
                     ui.clear()
-                for result in results:
-                    suite = result.test.build_system
-                    report.add_result(suite, result)
-                    if verbose or result.failed():
-                        printer.print_result(result)
+                    printer.print_result(result)
+                elif result.failed():
+                    ui.clear()
+                    printer.print_result(result)
                 ui.draw()
             ui.clear()
 
@@ -335,210 +595,124 @@
     assert isinstance(result, Failure)
 
     # adb might return no text at all under high load.
-    if "Could not find exit status in shell output." in result.message:
+    if 'Could not find exit status in shell output.' in result.message:
+        return True
+
+    # These libc++ tests expect to complete in a specific amount of time,
+    # and commonly fail under high load.
+    name = result.test.name
+    if 'libc++.libcxx/thread' in name or 'libc++.std/thread' in name:
         return True
 
     return False
 
 
-def restart_flaky_tests(
-    report: Report[DeviceShardingGroup],
-    workqueue: ShardingWorkQueue[TestResult, Device],
-) -> None:
+def restart_flaky_tests(report: Report, workqueue: ShardingWorkQueue) -> None:
     """Finds and restarts any failing flaky tests."""
     rerun_tests = report.remove_all_failing_flaky(flake_filter)
     if rerun_tests:
         cooldown = 10
         logger().warning(
-            "Found %d flaky failures. Sleeping for %d seconds to let "
-            "devices recover.",
-            len(rerun_tests),
-            cooldown,
-        )
+            'Found %d flaky failures. Sleeping for %d seconds to let '
+            'devices recover.', len(rerun_tests), cooldown)
         time.sleep(cooldown)
 
     for flaky_report in rerun_tests:
-        logger().warning("Flaky test failure: %s", flaky_report.result)
+        logger().warning('Flaky test failure: %s', flaky_report.result)
         group = flaky_report.result.test.device_group
         workqueue.add_task(group, run_test, flaky_report.result.test)
 
 
-def run_and_collect_logs(worker: Worker, test_run: TestRun) -> TestResult:
-    device: Device = worker.data[0]
-    worker.status = "Clearing device log"
-    device.clear_logcat()
-    result = run_test(worker, test_run)
-    if not isinstance(result, Failure):
-        logger().warning(
-            "Failing test passed on re-run while collecting logs. This makes testing "
-            "slower. Test flake should be investigated."
-        )
-        return result
-    worker.status = "Collecting device log"
-    log = device.logcat()
-    result.message += f"\nlogcat contents:\n{log}"
-    return result
-
-
-def get_and_attach_logs_for_failing_tests(
-    fleet: DeviceFleet, report: Report[DeviceShardingGroup], printer: Printer
-) -> None:
-    failures = report.remove_all_true_failures()
-    if not failures:
-        return
-
-    # Have to use max of one worker per re-run to ensure that the logs we collect do not
-    # conflate with other tests.
-    queue: ShardingWorkQueue[TestResult, Device] = ShardingWorkQueue(
-        fleet.get_unique_device_groups(), 1
-    )
-    try:
-        for failure in failures:
-            queue.add_task(failure.user_data, run_and_collect_logs, failure.test)
-        wait_for_results(report, queue, printer)
-    finally:
-        queue.terminate()
-        queue.join()
+def get_config_dict(config: str,
+                    abis: Optional[Iterable[Abi]]) -> Dict[str, Any]:
+    """Loads the test config from a JSON file, overriding its ABI list.
+
+    Args:
+        config: Path to the JSON config file.
+        abis: If not None, replaces the config's "abis" entry.
+    """
+    with open(config) as test_config_file:
+        test_config = json.load(test_config_file)
+    if abis is not None:
+        test_config['abis'] = abis
+    return test_config
 
 
 def str_to_bool(s: str) -> bool:
-    if s == "true":
+    if s == 'true':
         return True
-    if s == "false":
+    elif s == 'false':
         return False
     raise ValueError(s)
 
 
 def parse_args() -> argparse.Namespace:
-    doc = "https://android.googlesource.com/platform/ndk/+/master/docs/Testing.md"
-    parser = argparse.ArgumentParser(epilog="See {} for more information.".format(doc))
+    doc = ('https://android.googlesource.com/platform/ndk/+/master/'
+           'docs/Testing.md')
+    parser = argparse.ArgumentParser(
+        epilog='See {} for more information.'.format(doc))
 
-    def PathArg(path: str) -> Path:
-        # Path.resolve() fails if the path doesn't exist. We want to resolve
-        # symlinks when possible, but not require that the path necessarily
-        # exist, because we will create it later.
-        return Path(path).expanduser().resolve(strict=False)
-
-    def ExistingPathArg(path: str) -> Path:
-        expanded_path = Path(path).expanduser()
-        if not expanded_path.exists():
-            raise argparse.ArgumentTypeError("{} does not exist".format(path))
-        return expanded_path.resolve(strict=True)
-
-    def ExistingDirectoryArg(path: str) -> Path:
-        expanded_path = Path(path).expanduser()
-        if not expanded_path.is_dir():
-            raise argparse.ArgumentTypeError("{} is not a directory".format(path))
-        return expanded_path.resolve(strict=True)
-
-    def ExistingFileArg(path: str) -> Path:
-        expanded_path = Path(path).expanduser()
-        if not expanded_path.is_file():
-            raise argparse.ArgumentTypeError("{} is not a file".format(path))
-        return expanded_path.resolve(strict=True)
-
-    config_options = parser.add_argument_group("Test Configuration Options")
+    config_options = parser.add_argument_group('Test Configuration Options')
     config_options.add_argument(
-        "--filter", help="Only run tests that match the given pattern."
-    )
+        '--filter', help='Only run tests that match the given pattern.')
     config_options.add_argument(
-        "--abi",
-        action="append",
-        choices=ndk.abis.ALL_ABIS,
-        help="Test only the given APIs.",
-    )
+        '--abi', action='append', choices=ndk.abis.ALL_ABIS,
+        help='Test only the given APIs.')
 
     # The type ignore is needed because realpath is an overloaded function, and
     # mypy is bad at those (it doesn't satisfy Callable[[str], AnyStr]).
-    config_options.add_argument(
-        "--config",
-        type=ExistingFileArg,
-        default=ndk.paths.ndk_path("qa_config.json"),
-        help="Path to the config file describing the test run.",
-    )
+    config_options.add_argument(  # type: ignore
+        '--config',
+        type=os.path.realpath,
+        default='qa_config.json',
+        help='Path to the config file describing the test run.')
 
-    build_options = parser.add_argument_group("Build Options")
-    build_options.add_argument(
-        "--build-report",
-        type=PathArg,
-        help="Write the build report to the given path.",
-    )
+    build_options = parser.add_argument_group('Build Options')
+    build_options.add_argument(  # type: ignore
+        '--build-report',
+        type=os.path.realpath,
+        help='Write the build report to the given path.')
 
     build_exclusive_group = build_options.add_mutually_exclusive_group()
     build_exclusive_group.add_argument(
-        "--rebuild", action="store_true", help="Build the tests before running."
-    )
+        '--rebuild', action='store_true',
+        help='Build the tests before running.')
     build_exclusive_group.add_argument(
-        "--build-only", action="store_true", help="Builds the tests and exits."
-    )
+        '--build-only', action='store_true',
+        help='Builds the tests and exits.')
     build_options.add_argument(
-        "--clean", action="store_true", help="Remove the out directory before building."
-    )
-    build_options.add_argument(
-        "--package",
-        action="store_true",
-        help="Package the built tests. Requires --rebuild or --build-only.",
-    )
+        '--clean', action='store_true',
+        help='Remove the out directory before building.')
 
-    run_options = parser.add_argument_group("Test Run Options")
+    run_options = parser.add_argument_group('Test Run Options')
     run_options.add_argument(
-        "--clean-device",
-        action="store_true",
-        help="Clear the device directories before syncing.",
-    )
+        '--clean-device', action='store_true',
+        help='Clear the device directories before syncing.')
     run_options.add_argument(
-        "--require-all-devices",
-        action="store_true",
-        help="Abort if any devices specified by the config are not available.",
-    )
+        '--require-all-devices', action='store_true',
+        help='Abort if any devices specified by the config are not available.')
 
-    display_options = parser.add_argument_group("Display Options")
+    display_options = parser.add_argument_group('Display Options')
     display_options.add_argument(
-        "--show-all",
-        action="store_true",
-        help="Show all test results, not just failures.",
-    )
+        '--show-all', action='store_true',
+        help='Show all test results, not just failures.')
     display_options.add_argument(
-        "--show-test-stats",
-        action="store_true",
-        help="Print number of tests found for each configuration.",
-    )
+        '--show-test-stats', action='store_true',
+        help='Print number of tests found for each configuration.')
     display_options.add_argument(
-        "-v",
-        "--verbose",
-        action="count",
-        default=0,
-        help="Increase log level. Defaults to logging.WARNING.",
-    )
+        '-v', '--verbose', action='count', default=0,
+        help='Increase log level. Defaults to logging.WARNING.')
 
-    parser.add_argument(
-        "--ndk",
-        type=ExistingPathArg,
+    parser.add_argument(  # type: ignore
+        '--ndk',
+        type=os.path.realpath,
         default=ndk.paths.get_install_path(),
-        help="NDK to validate. Defaults to ../out/android-ndk-$RELEASE.",
-    )
-    parser.add_argument(
-        "--test-src",
-        type=ExistingDirectoryArg,
-        default=ndk.paths.ndk_path("tests"),
-        help="Path to test source directory. Defaults to ndk/tests.",
-    )
+        help='NDK to validate. Defaults to ../out/android-ndk-$RELEASE.')
+    parser.add_argument(  # type: ignore
+        '--test-src',
+        type=os.path.realpath,
+        help='Path to test source directory. Defaults to ./tests.')
 
-    parser.add_argument(
-        "test_dir",
-        metavar="TEST_DIR",
-        type=PathArg,
-        nargs="?",
-        default=ndk.paths.path_in_out(Path("tests")),
-        help="Directory containing built tests.",
-    )
-
-    parser.add_argument(
-        "--dist-dir",
-        type=PathArg,
-        default=ndk.paths.get_dist_dir(),
-        help="Directory to store packaged tests. Defaults to $DIST_DIR or ../out/dist",
-    )
+    parser.add_argument(  # type: ignore
+        'test_dir',
+        metavar='TEST_DIR',
+        type=os.path.realpath,
+        nargs='?',
+        default=ndk.paths.path_in_out('tests'),
+        help='Directory containing built tests.')
 
     return parser.parse_args()
 
@@ -554,7 +728,7 @@
             raise ValueError
         self.success = True
 
-    def failed(self, message: Optional[str] = None) -> None:
+    def failed(self, message: str = None) -> None:
         if self.success is not None:
             raise ValueError
         self.success = False
@@ -566,124 +740,67 @@
         assert timer.duration is not None
         self.times[label] = timer.duration
 
-    @contextmanager
-    def timed(self, description: str) -> Iterator[None]:
-        timer = Timer()
-        with timer:
-            yield
-        self.add_timing_report(description, timer)
-
-
-def unzip_ndk(ndk_path: Path) -> Path:
-    # Unzip the NDK into out/ndk-zip.
-    if ndk_path.suffix != ".zip":
-        raise ValueError(f"--ndk must be a directory or a .zip file: {ndk}")
-
-    ndk_dir = ndk.paths.path_in_out(Path(ndk_path.stem))
-    if ndk_dir.exists():
-        shutil.rmtree(ndk_dir)
-    ndk_dir.mkdir(parents=True)
-    try:
-        ndk.archive.unzip(ndk_path, ndk_dir)
-        contents = list(ndk_dir.iterdir())
-        assert len(contents) == 1
-        assert contents[0].is_dir()
-        # Windows paths, by default, are limited to 260 characters.
-        # Some of our deeply nested paths run up against this limitation.
-        # Therefore, after unzipping the NDK into something like
-        # out/android-ndk-8136140-windows-x86_64/android-ndk-r25-canary
-        # (61 characters) we rename it to out/ndk-zip (7 characters),
-        # shortening paths in the NDK by 54 characters.
-        short_path = ndk.paths.path_in_out(Path("ndk-zip"))
-        if short_path.exists():
-            shutil.rmtree(short_path)
-        contents[0].rename(short_path)
-        return short_path
-    finally:
-        shutil.rmtree(ndk_dir)
-
-
-def rebuild_tests(
-    args: argparse.Namespace, results: Results, test_spec: TestSpec
-) -> bool:
-    build_printer = StdoutPrinter(
-        show_all=args.show_all,
-        result_translations=ResultTranslations(success="BUILT"),
-    )
-    with results.timed("Build"):
-        test_options = ndk.test.spec.TestOptions(
-            args.test_src,
-            args.ndk,
-            args.test_dir,
-            test_filter=args.filter,
-            clean=args.clean,
-            package_path=args.dist_dir / "ndk-tests" if args.package else None,
-        )
-        builder = ndk.test.builder.TestBuilder(test_spec, test_options, build_printer)
-        report = builder.build()
-
-    if report.num_tests == 0:
-        results.failed("Found no tests for filter {}.".format(args.filter))
-        return False
-
-    build_printer.print_summary(report)
-    if not report.successful:
-        results.failed()
-        return False
-
-    return True
-
 
 def run_tests(args: argparse.Namespace) -> Results:
     results = Results()
 
-    if not args.test_dir.exists():
+    if not os.path.exists(args.test_dir):
         if args.rebuild or args.build_only:
-            args.test_dir.mkdir(parents=True)
+            os.makedirs(args.test_dir)
         else:
-            sys.exit("Test output directory does not exist: {}".format(args.test_dir))
+            sys.exit('Test output directory does not exist: {}'.format(
+                args.test_dir))
 
-    if args.package and not args.dist_dir.exists():
-        if args.rebuild or args.build_only:
-            args.dist_dir.mkdir(parents=True)
-
-    test_spec = TestSpec.load(args.config, abis=args.abi)
+    test_config = get_config_dict(args.config, args.abi)
 
     printer = StdoutPrinter(show_all=args.show_all)
 
-    if args.ndk.is_file():
-        args.ndk = unzip_ndk(args.ndk)
+    if args.test_src is None:
+        args.test_src = os.path.realpath('tests')
+        if not os.path.exists(args.test_src):
+            sys.exit('Test source directory does not exist: {}'.format(
+                args.test_src))
 
-    test_dist_dir = args.test_dir / "dist"
     if args.build_only or args.rebuild:
-        if not rebuild_tests(args, results, test_spec):
+        build_timer = Timer()
+        with build_timer:
+            report = build_tests(
+                args.test_src, args.ndk, args.test_dir, args.clean, printer,
+                test_config, args.filter)
+
+        results.add_timing_report('Build', build_timer)
+
+        if report.num_tests == 0:
+            results.failed('Found no tests for filter {}.'.format(args.filter))
+            return results
+
+        printer.print_summary(report)
+        if not report.successful:
+            results.failed()
             return results
 
     if args.build_only:
         results.passed()
         return results
 
+    test_dist_dir = os.path.join(args.test_dir, 'dist')
     test_filter = TestFilter.from_string(args.filter)
     # dict of {BuildConfiguration: [Test]}
-    config_filter = ConfigFilter(test_spec)
-    with results.timed("Test discovery"):
+    config_filter = ConfigFilter(test_config)
+    test_discovery_timer = Timer()
+    with test_discovery_timer:
         test_groups = enumerate_tests(
-            test_dist_dir,
-            args.test_src,
-            ndk.paths.DEVICE_TEST_BASE_DIR,
-            test_filter,
-            config_filter,
-        )
+            test_dist_dir, args.test_src, test_filter, config_filter)
+    results.add_timing_report('Test discovery', test_discovery_timer)
 
-    if sum(len(tests) for tests in test_groups.values()) == 0:
+    if sum([len(tests) for tests in test_groups.values()]) == 0:
         # As long as we *built* some tests, not having anything to run isn't a
         # failure.
         if args.rebuild:
             results.passed()
         else:
-            results.failed(
-                "Found no tests in {} for filter {}.".format(test_dist_dir, args.filter)
-            )
+            results.failed('Found no tests in {} for filter {}.'.format(
+                test_dist_dir, args.filter))
         return results
 
     if args.show_test_stats:
@@ -709,35 +826,39 @@
     # configuration that is unclaimed, print a warning.
     workqueue = WorkQueue()
     try:
-        with results.timed("Device discovery"):
-            fleet = find_devices(test_spec.devices, workqueue)
+        device_discovery_timer = Timer()
+        with device_discovery_timer:
+            fleet = find_devices(test_config['devices'], workqueue)
+        results.add_timing_report('Device discovery', device_discovery_timer)
 
         have_all_devices = verify_have_all_requested_devices(fleet)
         if args.require_all_devices and not have_all_devices:
-            results.failed("Some requested devices were not available.")
+            results.failed('Some requested devices were not available.')
             return results
 
-        groups_for_config = match_configs_to_device_groups(fleet, test_groups.keys())
+        groups_for_config = match_configs_to_device_groups(
+            fleet, test_groups.keys())
         for config in find_configs_with_no_device(groups_for_config):
-            logger().warning("No device found for %s.", config)
+            logger().warning('No device found for %s.', config)
 
+        report = Report()
+        clean_device_timer = Timer()
         if args.clean_device:
-            with results.timed("Clean device"):
+            with clean_device_timer:
                 clear_test_directories(workqueue, fleet)
+            results.add_timing_report('Clean device', clean_device_timer)
 
-        can_use_sync = adb_has_feature("push_sync")
-        with results.timed("Push"):
+        can_use_sync = adb_has_feature('push_sync')
+        push_timer = Timer()
+        with push_timer:
             push_tests_to_devices(
-                workqueue, test_dist_dir, groups_for_config, can_use_sync
-            )
+                workqueue, test_dist_dir, groups_for_config, can_use_sync)
+        results.add_timing_report('Push', push_timer)
     finally:
         workqueue.terminate()
         workqueue.join()
 
-    report = Report[DeviceShardingGroup]()
-    shard_queue: ShardingWorkQueue[TestResult, Device] = ShardingWorkQueue(
-        fleet.get_unique_device_groups(), 4
-    )
+    shard_queue = ShardingWorkQueue(fleet.get_unique_device_groups(), 4)
     try:
         # Need an input queue per device group, a single result queue, and a
         # pool of threads per device.
@@ -745,23 +866,23 @@
         # Shuffle the test runs to distribute the load more evenly. These are
         # ordered by (build config, device, test), so most of the tests running
         # at any given point in time are all running on the same device.
-        test_runs = pair_test_runs(test_groups, groups_for_config, report, fleet)
+        test_runs = pair_test_runs(test_groups, groups_for_config)
         random.shuffle(test_runs)
-        with results.timed("Run"):
+        test_run_timer = Timer()
+        with test_run_timer:
             for test_run in test_runs:
                 shard_queue.add_task(test_run.device_group, run_test, test_run)
 
             wait_for_results(report, shard_queue, printer)
             restart_flaky_tests(report, shard_queue)
             wait_for_results(report, shard_queue, printer)
+        results.add_timing_report('Run', test_run_timer)
+
+        printer.print_summary(report)
     finally:
         shard_queue.terminate()
         shard_queue.join()
 
-    get_and_attach_logs_for_failing_tests(fleet, report, printer)
-
-    printer.print_summary(report)
-
     if report.successful:
         results.passed()
     else:
@@ -773,14 +894,12 @@
 def main() -> None:
     args = parse_args()
 
-    ensure_python_environment()
-
     log_levels = [logging.WARNING, logging.INFO, logging.DEBUG]
     verbosity = min(args.verbose, len(log_levels) - 1)
     log_level = log_levels[verbosity]
     logging.basicConfig(level=log_level)
 
-    python_packages = args.ndk / "python-packages"
+    python_packages = os.path.join(args.ndk, 'python-packages')
     site.addsitedir(python_packages)
 
     total_timer = Timer()
@@ -788,23 +907,22 @@
         results = run_tests(args)
 
     if results.success is None:
-        raise RuntimeError("run_tests returned without indicating success or failure.")
+        raise RuntimeError(
+            'run_tests returned without indicating success or failure.')
 
     good = results.success
-    print("Finished {}".format("successfully" if good else "unsuccessfully"))
-    if (message := results.failure_message) is not None:
-        print(message)
+    print('Finished {}'.format('successfully' if good else 'unsuccessfully'))
 
     for timer, duration in results.times.items():
-        print("{}: {}".format(timer, duration))
-    print("Total: {}".format(total_timer.duration))
+        print('{}: {}'.format(timer, duration))
+    print('Total: {}'.format(total_timer.duration))
 
-    subject = "NDK Testing {}!".format("Passed" if good else "Failed")
-    body = "Testing finished in {}".format(total_timer.duration)
+    subject = 'NDK Testing {}!'.format('Passed' if good else 'Failed')
+    body = 'Testing finished in {}'.format(total_timer.duration)
     ndk.notify.toast(subject, body)
 
     sys.exit(not good)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/ndk/termcolor.py b/ndk/termcolor.py
index 7624194..586acaa 100644
--- a/ndk/termcolor.py
+++ b/ndk/termcolor.py
@@ -21,11 +21,11 @@
 def color_string(string: str, color: str) -> str:
     """Returns a string that will be colored when printed to a terminal."""
     colors = {
-        "green": "\033[92m",
-        "red": "\033[91m",
-        "yellow": "\033[93m",
+        'green': '\033[92m',
+        'red': '\033[91m',
+        'yellow': '\033[93m',
     }
-    end_color = "\033[0m"
+    end_color = '\033[0m'
     return colors[color] + string + end_color
 
 
diff --git a/ndk/test/builder.py b/ndk/test/builder.py
index e62f72e..0f3ebd7 100644
--- a/ndk/test/builder.py
+++ b/ndk/test/builder.py
@@ -24,24 +24,23 @@
 import shutil
 import sys
 import traceback
-from pathlib import Path
-from typing import Dict, List, Tuple
+from typing import (
+    Dict,
+    List,
+    Tuple,
+)
 
 import ndk.abis
-import ndk.archive
-import ndk.paths
-import ndk.test.devicetest.scanner
-import ndk.test.spec
-import ndk.test.suites
-import ndk.test.ui
-import ndk.ui
-from ndk.test.buildtest.case import Test
-from ndk.test.buildtest.scanner import TestScanner
-from ndk.test.devices import DeviceConfig
 from ndk.test.filters import TestFilter
 from ndk.test.printers import Printer
 from ndk.test.report import Report
-from ndk.workqueue import AnyWorkQueue, Worker, WorkQueue
+from ndk.test.scanner import TestScanner
+import ndk.test.spec
+import ndk.test.suites
+from ndk.test.types import Test
+import ndk.test.ui
+from ndk.toolchains import LinkerOption
+from ndk.workqueue import LoadRestrictingWorkQueue, Worker
 
 
 def logger() -> logging.Logger:
@@ -49,54 +48,58 @@
     return logging.getLogger(__name__)
 
 
-def write_build_report(build_report: str, results: Report[None]) -> None:
-    with open(build_report, "wb") as build_report_file:
+def test_spec_from_config(test_config: Dict) -> ndk.test.spec.TestSpec:
+    """Returns a TestSpec based on the test config file."""
+    abis = test_config.get('abis', ndk.abis.ALL_ABIS)
+    suites = test_config.get('suites', ndk.test.suites.ALL_SUITES)
+    linkers_str = test_config.get('linkers', None)
+    if linkers_str is None:
+        linkers = list(LinkerOption)
+    else:
+        linkers = [LinkerOption(l) for l in linkers_str]
+
+    return ndk.test.spec.TestSpec(abis, linkers, suites)
+
+
+def write_build_report(build_report: str, results: Report) -> None:
+    with open(build_report, 'wb') as build_report_file:
         pickle.dump(results, build_report_file)
 
 
-def scan_test_suite(suite_dir: Path, test_scanner: TestScanner) -> List[Test]:
+def scan_test_suite(suite_dir: str, test_scanner: TestScanner) -> List[Test]:
     tests: List[Test] = []
     for dentry in os.listdir(suite_dir):
-        path = suite_dir / dentry
-        if path.is_dir():
-            test_name = path.name
+        path = os.path.join(suite_dir, dentry)
+        if os.path.isdir(path):
+            test_name = os.path.basename(path)
             tests.extend(test_scanner.find_tests(path, test_name))
     return tests
 
 
-def _fixup_expected_failure(
-    result: ndk.test.result.TestResult, config: str, bug: str
-) -> ndk.test.result.TestResult:
+def _fixup_expected_failure(result: ndk.test.result.TestResult, config: str,
+                            bug: str) -> ndk.test.result.TestResult:
     if isinstance(result, ndk.test.result.Failure):
-        return ndk.test.result.ExpectedFailure(result.test, result.message, config, bug)
-    if isinstance(result, ndk.test.result.Success):
+        return ndk.test.result.ExpectedFailure(result.test, config, bug)
+    elif isinstance(result, ndk.test.result.Success):
         return ndk.test.result.UnexpectedSuccess(result.test, config, bug)
-    # Skipped, UnexpectedSuccess, or ExpectedFailure.
-    return result
+    else:  # Skipped, UnexpectedSuccess, or ExpectedFailure.
+        return result
 
 
 def _fixup_negative_test(
-    result: ndk.test.result.TestResult,
-) -> ndk.test.result.TestResult:
+        result: ndk.test.result.TestResult) -> ndk.test.result.TestResult:
     if isinstance(result, ndk.test.result.Failure):
         return ndk.test.result.Success(result.test)
-    if isinstance(result, ndk.test.result.Success):
-        return ndk.test.result.Failure(result.test, "negative test case succeeded")
-    # Skipped, UnexpectedSuccess, or ExpectedFailure.
-    return result
+    elif isinstance(result, ndk.test.result.Success):
+        return ndk.test.result.Failure(
+            result.test, 'negative test case succeeded')
+    else:  # Skipped, UnexpectedSuccess, or ExpectedFailure.
+        return result
 
 
-RunTestResult = tuple[str, ndk.test.result.TestResult, list[Test]]
-
-
-def _run_test(
-    worker: Worker,
-    suite: str,
-    test: Test,
-    obj_dir: Path,
-    dist_dir: Path,
-    test_filters: TestFilter,
-) -> RunTestResult:
+def _run_test(worker: Worker, suite: str, test: Test,
+              obj_dir: str, dist_dir: str, test_filters: TestFilter
+              ) -> Tuple[str, ndk.test.result.TestResult, List[Test]]:
     """Runs a given test according to the given filters.
 
     Args:
@@ -110,11 +113,11 @@
     Returns: Tuple of (suite, TestResult, [Test]). The [Test] element is a list
              of additional tests to be run.
     """
-    worker.status = "Building {}".format(test)
+    worker.status = 'Building {}'.format(test)
 
     config = test.check_unsupported()
     if config is not None:
-        message = "test unsupported for {}".format(config)
+        message = 'test unsupported for {}'.format(config)
         return suite, ndk.test.result.Skipped(test, message), []
 
     try:
@@ -134,82 +137,85 @@
 
 
 class TestBuilder:
-    def __init__(
-        self,
-        test_spec: ndk.test.spec.TestSpec,
-        test_options: ndk.test.spec.TestOptions,
-        printer: Printer,
-    ) -> None:
+    def __init__(self, test_spec: ndk.test.spec.TestSpec,
+                 test_options: ndk.test.spec.TestOptions,
+                 printer: Printer) -> None:
         self.printer = printer
         self.tests: Dict[str, List[Test]] = {}
-        self.build_dirs: Dict[Path, Tuple[str, Test]] = {}
+        self.build_dirs: Dict[str, Tuple[str, Test]] = {}
 
         self.test_options = test_options
 
-        self.obj_dir = self.test_options.out_dir / "obj"
-        self.dist_dir = self.test_options.out_dir / "dist"
+        self.obj_dir = os.path.join(self.test_options.out_dir, 'obj')
+        self.dist_dir = os.path.join(self.test_options.out_dir, 'dist')
 
-        self.test_spec = test_spec
-        self.find_tests()
+        self.find_tests(test_spec)
 
-    def find_tests(self) -> None:
-        scanner = ndk.test.buildtest.scanner.BuildTestScanner(
-            self.test_options.ndk_path
-        )
-        nodist_scanner = ndk.test.buildtest.scanner.BuildTestScanner(
-            self.test_options.ndk_path, dist=False
-        )
-        # This is always None for the global config while building. See the comment in
-        # the definition of BuildConfiguration.
-        build_api_level = None
-        for abi in self.test_spec.abis:
-            for toolchain_file in ndk.test.spec.CMakeToolchainFile:
-                for weak_symbols in ndk.test.spec.WeakSymbolsConfig:
-                    config = ndk.test.spec.BuildConfiguration(
-                        abi, build_api_level, toolchain_file, weak_symbols
-                    )
-                    scanner.add_build_configuration(config)
-                    nodist_scanner.add_build_configuration(config)
+    def find_tests(self, test_spec: ndk.test.spec.TestSpec) -> None:
+        scanner = ndk.test.scanner.BuildTestScanner(self.test_options.ndk_path)
+        nodist_scanner = ndk.test.scanner.BuildTestScanner(
+            self.test_options.ndk_path, dist=False)
+        libcxx_scanner = ndk.test.scanner.LibcxxTestScanner(
+            self.test_options.ndk_path)
+        for abi in test_spec.abis:
+            for linker in test_spec.linkers:
+                build_api_level = None  # Always use the default.
 
-        if "build" in self.test_spec.suites:
-            test_src = self.test_options.src_dir / "build"
-            self.add_suite("build", test_src, nodist_scanner)
-        if "device" in self.test_spec.suites:
-            test_src = self.test_options.src_dir / "device"
-            self.add_suite("device", test_src, scanner)
+                scanner.add_build_configuration(abi, build_api_level, linker)
+                nodist_scanner.add_build_configuration(abi, build_api_level,
+                                                       linker)
+                libcxx_scanner.add_build_configuration(abi, build_api_level,
+                                                       linker)
 
-    def add_suite(self, name: str, path: Path, test_scanner: TestScanner) -> None:
+        if 'build' in test_spec.suites:
+            test_src = os.path.join(self.test_options.src_dir, 'build')
+            self.add_suite('build', test_src, nodist_scanner)
+        if 'device' in test_spec.suites:
+            test_src = os.path.join(self.test_options.src_dir, 'device')
+            self.add_suite('device', test_src, scanner)
+        if 'libc++' in test_spec.suites:
+            test_src = os.path.join(self.test_options.src_dir, 'libc++')
+            self.add_suite('libc++', test_src, libcxx_scanner)
+
+    @classmethod
+    def from_config_file(cls, config_path: str,
+                         test_options: ndk.test.spec.TestOptions,
+                         printer: Printer) -> 'TestBuilder':
+        with open(config_path) as test_config_file:
+            test_config = json.load(test_config_file)
+        spec = test_spec_from_config(test_config)
+        return cls(spec, test_options, printer)
+
+    def add_suite(self, name: str, path: str,
+                  test_scanner: TestScanner) -> None:
         if name in self.tests:
-            raise KeyError("suite {} already exists".format(name))
+            raise KeyError('suite {} already exists'.format(name))
         new_tests = scan_test_suite(path, test_scanner)
         self.check_no_overlapping_build_dirs(name, new_tests)
         self.tests[name] = new_tests
 
-    def check_no_overlapping_build_dirs(
-        self, suite: str, new_tests: List[Test]
-    ) -> None:
+    def check_no_overlapping_build_dirs(self, suite: str,
+                                        new_tests: List[Test]) -> None:
         for test in new_tests:
-            build_dir = test.get_build_dir(Path(""))
+            build_dir = test.get_build_dir('')
             if build_dir in self.build_dirs:
                 dup_suite, dup_test = self.build_dirs[build_dir]
                 raise RuntimeError(
-                    "Found duplicate build directory:\n{} {}\n{} {}".format(
-                        dup_suite, dup_test, suite, test
-                    )
-                )
+                    'Found duplicate build directory:\n{} {}\n{} {}'.format(
+                        dup_suite, dup_test, suite, test))
             self.build_dirs[build_dir] = (suite, test)
 
     def make_out_dirs(self) -> None:
-        if not self.obj_dir.exists():
-            self.obj_dir.mkdir(parents=True)
-        if not self.dist_dir.exists():
-            self.dist_dir.mkdir(parents=True)
+        if not os.path.exists(self.obj_dir):
+            os.makedirs(self.obj_dir)
+        if not os.path.exists(self.dist_dir):
+            os.makedirs(self.dist_dir)
 
     def clean_out_dir(self) -> None:
-        if self.test_options.out_dir.exists():
+        if os.path.exists(self.test_options.out_dir):
             shutil.rmtree(self.test_options.out_dir)
 
-    def build(self) -> Report[None]:
+    def build(self) -> Report:
         if self.test_options.clean:
             self.clean_out_dir()
         self.make_out_dirs()
@@ -218,12 +224,10 @@
         result = self.do_build(test_filters)
         if self.test_options.build_report:
             write_build_report(self.test_options.build_report, result)
-        if result.successful and self.test_options.package_path is not None:
-            self.package()
         return result
 
-    def do_build(self, test_filters: TestFilter) -> Report[None]:
-        workqueue = WorkQueue()
+    def do_build(self, test_filters: TestFilter) -> Report:
+        workqueue = LoadRestrictingWorkQueue()
         try:
             for suite, tests in self.tests.items():
                 # Each test configuration was expanded when each test was
@@ -234,16 +238,17 @@
                 for test in tests:
                     if not test_filters.filter(test.name):
                         continue
-                    workqueue.add_task(
-                        _run_test,
-                        suite,
-                        test,
-                        self.obj_dir,
-                        self.dist_dir,
-                        test_filters,
-                    )
 
-            report = Report[None]()
+                    if test.name == 'libc++':
+                        workqueue.add_load_restricted_task(
+                            _run_test, suite, test, self.obj_dir,
+                            self.dist_dir, test_filters)
+                    else:
+                        workqueue.add_task(
+                            _run_test, suite, test, self.obj_dir,
+                            self.dist_dir, test_filters)
+
+            report = Report()
             self.wait_for_results(report, workqueue, test_filters)
 
             return report
@@ -251,87 +256,33 @@
             workqueue.terminate()
             workqueue.join()
 
-    def wait_for_results(
-        self,
-        report: Report[None],
-        workqueue: AnyWorkQueue,
-        test_filters: TestFilter,
-    ) -> None:
+    def wait_for_results(self, report: Report,
+                         workqueue: LoadRestrictingWorkQueue,
+                         test_filters: TestFilter) -> None:
         console = ndk.ansi.get_console()
-        ui = ndk.ui.get_work_queue_ui(console, workqueue)
+        ui = ndk.test.ui.get_test_build_progress_ui(console, workqueue)
         with ndk.ansi.disable_terminal_echo(sys.stdin):
             with console.cursor_hide_context():
                 while not workqueue.finished():
-                    for suite, result, additional_tests in workqueue.get_results():
-                        assert result.passed() or not additional_tests
-                        for test in additional_tests:
-                            workqueue.add_task(
-                                _run_test,
-                                suite,
-                                test,
-                                self.obj_dir,
-                                self.dist_dir,
-                                test_filters,
-                            )
-                        if logger().isEnabledFor(logging.INFO):
-                            ui.clear()
-                            self.printer.print_result(result)
-                        elif result.failed():
-                            ui.clear()
-                            self.printer.print_result(result)
-                        report.add_result(suite, result)
+                    suite, result, additional_tests = workqueue.get_result()
+                    # Filtered test. Skip them entirely to avoid polluting
+                    # --show-all results.
+                    if result is None:
+                        assert not additional_tests
+                        ui.draw()
+                        continue
+
+                    assert result.passed() or not additional_tests
+                    for test in additional_tests:
+                        workqueue.add_task(
+                            _run_test, suite, test, self.obj_dir,
+                            self.dist_dir, test_filters)
+                    if logger().isEnabledFor(logging.INFO):
+                        ui.clear()
+                        self.printer.print_result(result)
+                    elif result.failed():
+                        ui.clear()
+                        self.printer.print_result(result)
+                    report.add_result(suite, result)
                     ui.draw()
                 ui.clear()
-
-    def package(self) -> None:
-        assert self.test_options.package_path is not None
-        print("Packaging tests...")
-
-        ndk.archive.make_bztar(
-            self.test_options.package_path,
-            self.test_options.out_dir.parent,
-            Path("tests/dist"),
-        )
-
-        test_groups = ndk.test.devicetest.scanner.enumerate_tests(
-            self.test_options.out_dir / "dist",
-            self.test_options.src_dir,
-            ndk.paths.DEVICE_TEST_BASE_DIR,
-            TestFilter.from_string(self.test_options.test_filter),
-            ndk.test.devicetest.scanner.ConfigFilter(self.test_spec),
-        )
-        tests_json: dict[str, list[dict[str, str | list[int]]]] = {}
-        for config, tests in test_groups.items():
-            testlist: list[dict[str, str | list[int]]] = []
-            for test in tests:
-                testobj: dict[str, str | list[int]] = {
-                    "cmd": test.cmd,
-                    "name": f"{config}.{test.build_system}.{test.name}",
-                }
-                unsupported: list[int] = []
-                broken: list[int] = []
-                for device_version, abis in self.test_spec.devices.items():
-                    if config.abi not in abis:
-                        continue
-                    # Pretend device doesn't support MTE which is the safer bet.
-                    device_config = DeviceConfig([config.abi], device_version, False)
-                    if test.check_unsupported(device_config) is not None:
-                        unsupported.append(device_version)
-                    else:
-                        broken_config, _bug = test.check_broken(device_config)
-                        if broken_config is not None:
-                            broken.append(device_version)
-                if unsupported:
-                    testobj["unsupported"] = unsupported
-                if broken:
-                    testobj["broken"] = broken
-                testlist.append(testobj)
-            tests_json[str(config)] = testlist
-        json_config_path = self.test_options.out_dir / "dist" / "tests.json"
-        with json_config_path.open("w", encoding="utf-8") as outfile:
-            json.dump(tests_json, outfile, indent=2)
-        shutil.copy2(json_config_path, self.test_options.package_path.parent)
-        shutil.copy2(
-            self.test_options.src_dir.parent / "qa_config.json",
-            self.test_options.package_path.parent,
-        )
diff --git a/ndk/test/buildtest/case.py b/ndk/test/buildtest/case.py
deleted file mode 100644
index 30e575d..0000000
--- a/ndk/test/buildtest/case.py
+++ /dev/null
@@ -1,508 +0,0 @@
-#
-# Copyright (C) 2015 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Build test cases."""
-
-import importlib.util
-import logging
-import multiprocessing
-import os
-import shlex
-import shutil
-import subprocess
-from abc import ABC, abstractmethod
-from importlib.abc import Loader
-from pathlib import Path
-from subprocess import CompletedProcess
-from typing import List, Optional, Tuple
-
-import ndk.ansi
-import ndk.ext.os
-import ndk.ext.subprocess
-import ndk.hosts
-import ndk.ndkbuild
-import ndk.paths
-from ndk.abis import Abi
-from ndk.cmake import find_cmake, find_ninja
-from ndk.test.config import TestConfig
-from ndk.test.filters import TestFilter
-from ndk.test.result import Failure, Skipped, Success, TestResult
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
-
-
-def logger() -> logging.Logger:
-    """Return the logger for this module."""
-    return logging.getLogger(__name__)
-
-
-def _get_jobs_args() -> List[str]:
-    cpus = multiprocessing.cpu_count()
-    return [f"-j{cpus}", f"-l{cpus}"]
-
-
-def _prep_build_dir(src_dir: Path, out_dir: Path) -> None:
-    if out_dir.exists():
-        shutil.rmtree(out_dir)
-    shutil.copytree(src_dir, out_dir, ignore=shutil.ignore_patterns("__pycache__"))
-
-
-class Test(ABC):
-    def __init__(
-        self, name: str, test_dir: Path, config: BuildConfiguration, ndk_path: Path
-    ) -> None:
-        self.name = name
-        self.test_dir = test_dir
-        self.config = config
-        self.ndk_path = ndk_path
-        self.config = self.config.with_api(self.determine_api_level_for_config())
-
-    @abstractmethod
-    def determine_api_level_for_config(self) -> int: ...
-
-    def get_test_config(self) -> TestConfig:
-        return TestConfig.from_test_dir(self.test_dir)
-
-    def run(
-        self, obj_dir: Path, dist_dir: Path, test_filters: TestFilter
-    ) -> Tuple[TestResult, List["Test"]]:
-        raise NotImplementedError
-
-    def is_negative_test(self) -> bool:
-        raise NotImplementedError
-
-    def check_broken(self) -> tuple[None, None] | tuple[str, str]:
-        return self.get_test_config().build_broken(self)
-
-    def check_unsupported(self) -> Optional[str]:
-        return self.get_test_config().build_unsupported(self)
-
-    def get_build_dir(self, out_dir: Path) -> Path:
-        raise NotImplementedError
-
-    def __str__(self) -> str:
-        return f"{self.name} [{self.config}]"
-
-
-class BuildTest(Test):
-    def __init__(
-        self, name: str, test_dir: Path, config: BuildConfiguration, ndk_path: Path
-    ) -> None:
-        super().__init__(name, test_dir, config, ndk_path)
-
-        if self.api is None:
-            raise ValueError
-
-    @property
-    def abi(self) -> Abi:
-        return self.config.abi
-
-    @property
-    def api(self) -> Optional[int]:
-        return self.config.api
-
-    @property
-    def ndk_build_flags(self) -> List[str]:
-        flags = self.config.get_extra_ndk_build_flags()
-        return flags + self.get_extra_ndk_build_flags()
-
-    @property
-    def cmake_flags(self) -> List[str]:
-        flags = self.config.get_extra_cmake_flags()
-        return flags + self.get_extra_cmake_flags()
-
-    def make_build_result(self, proc: CompletedProcess[str]) -> TestResult:
-        if proc.returncode == 0:
-            return Success(self)
-        return Failure(
-            self, f"Test build failed: {shlex.join(proc.args)}:\n{proc.stdout}"
-        )
-
-    def verify_no_cruft_in_dist(
-        self, dist_dir: Path, build_cmd: list[str]
-    ) -> Optional[Failure[None]]:
-        bad_files = []
-        for path in ndk.paths.walk(dist_dir, directories=False):
-            if path.suffix == ".a":
-                bad_files.append(str(path))
-        if bad_files:
-            files = "\n".join(bad_files)
-            return Failure(
-                self,
-                f"Found unexpected files in test dist directory. Build command was: "
-                f"{shlex.join(build_cmd)}\n{files}",
-            )
-        return None
-
-    def run(
-        self, obj_dir: Path, dist_dir: Path, _test_filters: TestFilter
-    ) -> Tuple[TestResult, List[Test]]:
-        raise NotImplementedError
-
-    def check_broken(self) -> tuple[None, None] | tuple[str, str]:
-        return self.get_test_config().build_broken(self)
-
-    def check_unsupported(self) -> Optional[str]:
-        return self.get_test_config().build_unsupported(self)
-
-    def is_negative_test(self) -> bool:
-        return self.get_test_config().is_negative_test()
-
-    def get_extra_cmake_flags(self) -> List[str]:
-        return self.get_test_config().extra_cmake_flags()
-
-    def get_extra_ndk_build_flags(self) -> List[str]:
-        return self.get_test_config().extra_ndk_build_flags()
-
-    def get_overridden_runtime_minsdkversion(self) -> int | None:
-        return self.get_test_config().override_runtime_minsdkversion(self)
-
-
-class PythonBuildTest(BuildTest):
-    """A test that is implemented by test.py.
-
-    A test.py test has a test.py file in its root directory. This module
-    contains a run_test function which returns a Tuple[bool, Optional[str]] of
-    the success status and, if applicable, an error message and takes the
-    following kwargs:
-
-    ndk_path: The path to the NDK under test.
-    abi: The ABI being tested.
-    api: The minSdkVersion being tested.
-    linker: The LinkerOption option being.
-
-    The test source directory will be copied into the test build directory for
-    the given build configuration. The working directory will automatically be
-    set to the root of the copied source test directory.
-    """
-
-    def __init__(
-        self, name: str, test_dir: Path, config: BuildConfiguration, ndk_path: Path
-    ) -> None:
-        super().__init__(name, test_dir, config, ndk_path)
-
-        if self.abi not in ndk.abis.ALL_ABIS:
-            raise ValueError("{} is not a valid ABI".format(self.abi))
-
-        try:
-            assert self.api is not None
-            int(self.api)
-        except ValueError as ex:
-            raise ValueError(f"{self.api} is not a valid API number") from ex
-
-    def determine_api_level_for_config(self) -> int:
-        return ndk.abis.min_api_for_abi(self.config.abi)
-
-    def get_build_dir(self, out_dir: Path) -> Path:
-        return out_dir / str(self.config) / "test.py" / self.name
-
-    def run(
-        self, obj_dir: Path, _dist_dir: Path, _test_filters: TestFilter
-    ) -> Tuple[TestResult, List[Test]]:
-        build_dir = self.get_build_dir(obj_dir)
-        logger().info("Building test: %s", self.name)
-        _prep_build_dir(self.test_dir, build_dir)
-        with ndk.ext.os.cd(build_dir):
-            spec = importlib.util.spec_from_file_location("test", "test.py")
-            if spec is None or spec.loader is None:
-                path = build_dir / "test.py"
-                raise RuntimeError(f"Could not import {path}")
-            module = importlib.util.module_from_spec(spec)
-            # https://github.com/python/typeshed/issues/2793
-            assert isinstance(spec.loader, Loader)
-            spec.loader.exec_module(module)
-            success, failure_message = module.run_test(self.ndk_path, self.config)
-            if success:
-                return Success(self), []
-            return Failure(self, failure_message), []
-
-
-class ShellBuildTest(BuildTest):
-    def determine_api_level_for_config(self) -> int:
-        return ndk.abis.min_api_for_abi(self.config.abi)
-
-    def get_build_dir(self, out_dir: Path) -> Path:
-        return out_dir / str(self.config) / "build.sh" / self.name
-
-    def run(
-        self, obj_dir: Path, _dist_dir: Path, _test_filters: TestFilter
-    ) -> Tuple[TestResult, List[Test]]:
-        build_dir = self.get_build_dir(obj_dir)
-        logger().info("Building test: %s", self.name)
-        if os.name == "nt":
-            reason = "build.sh tests are not supported on Windows"
-            return Skipped(self, reason), []
-        assert self.api is not None
-        result = _run_build_sh_test(
-            self,
-            build_dir,
-            self.test_dir,
-            self.ndk_path,
-            self.ndk_build_flags,
-            self.abi,
-            self.api,
-        )
-        return result, []
-
-
-def _run_build_sh_test(
-    test: ShellBuildTest,
-    build_dir: Path,
-    test_dir: Path,
-    ndk_path: Path,
-    ndk_build_flags: List[str],
-    abi: Abi,
-    platform: int,
-) -> TestResult:
-    _prep_build_dir(test_dir, build_dir)
-    with ndk.ext.os.cd(build_dir):
-        build_cmd = ["bash", "build.sh"] + _get_jobs_args() + ndk_build_flags
-        test_env = dict(os.environ)
-        test_env["NDK"] = str(ndk_path)
-        if abi is not None:
-            test_env["APP_ABI"] = abi
-        test_env["APP_PLATFORM"] = f"android-{platform}"
-        rc, out = ndk.ext.subprocess.call_output(
-            build_cmd, env=test_env, encoding="utf-8"
-        )
-        if rc == 0:
-            return Success(test)
-        return Failure(test, out)
-
-
-def _platform_from_application_mk(test_dir: Path) -> Optional[int]:
-    """Determine target API level from a test's Application.mk.
-
-    Args:
-        test_dir: Directory of the test to read.
-
-    Returns:
-        Integer portion of APP_PLATFORM if found, else None.
-
-    Raises:
-        ValueError: Found an unexpected value for APP_PLATFORM.
-    """
-    application_mk = test_dir / "jni" / "Application.mk"
-    if not application_mk.exists():
-        return None
-
-    with application_mk.open(encoding="utf-8") as application_mk_file:
-        for line in application_mk_file:
-            if line.startswith("APP_PLATFORM"):
-                _, platform_str = line.split(":=")
-                break
-        else:
-            return None
-
-    platform_str = platform_str.strip()
-    if not platform_str.startswith("android-"):
-        raise ValueError(platform_str)
-
-    _, api_level_str = platform_str.split("-")
-    return int(api_level_str)
-
-
-def _get_or_infer_app_platform(
-    overridden_runtime_minsdkversion: int | None,
-    test_dir: Path,
-    abi: Abi,
-) -> int:
-    """Determines the platform level to use for a test using ndk-build.
-
-    Choose the platform level from, in order of preference:
-    1. The value forced by the test_config.py using override_runtime_minsdkversion.
-    2. APP_PLATFORM from jni/Application.mk.
-    3. Default value for the target ABI.
-
-    Args:
-        overridden_runtime_minsdkversion: The test's forced runtime minSdkVersion. Might
-            differ from the build API level. This is rare (probably only static
-            executables).
-        test_dir: The directory containing the ndk-build project.
-        abi: The ABI being targeted.
-
-    Returns:
-        The platform version the test should build against.
-    """
-    if overridden_runtime_minsdkversion is not None:
-        return overridden_runtime_minsdkversion
-
-    minimum_version = ndk.abis.min_api_for_abi(abi)
-    platform_from_application_mk = _platform_from_application_mk(test_dir)
-    if platform_from_application_mk is not None:
-        if platform_from_application_mk >= minimum_version:
-            return platform_from_application_mk
-
-    return minimum_version
-
-
-class NdkBuildTest(BuildTest):
-    def __init__(
-        self,
-        name: str,
-        test_dir: Path,
-        config: BuildConfiguration,
-        ndk_path: Path,
-        dist: bool,
-    ) -> None:
-        super().__init__(name, test_dir, config, ndk_path)
-        self.dist = dist
-
-    def determine_api_level_for_config(self) -> int:
-        return _get_or_infer_app_platform(
-            self.get_overridden_runtime_minsdkversion(),
-            self.test_dir,
-            self.config.abi,
-        )
-
-    def get_dist_dir(self, obj_dir: Path, dist_dir: Path) -> Path:
-        if self.dist:
-            return self.get_build_dir(dist_dir)
-        return self.get_build_dir(obj_dir) / "dist"
-
-    def get_build_dir(self, out_dir: Path) -> Path:
-        return out_dir / str(self.config) / "ndk-build" / self.name
-
-    def run(
-        self, obj_dir: Path, dist_dir: Path, _test_filters: TestFilter
-    ) -> Tuple[TestResult, List[Test]]:
-        logger().info("Building test: %s", self.name)
-        obj_dir = self.get_build_dir(obj_dir)
-        dist_dir = self.get_dist_dir(obj_dir, dist_dir)
-        assert self.api is not None
-        proc = _run_ndk_build_test(
-            obj_dir,
-            dist_dir,
-            self.test_dir,
-            self.ndk_path,
-            self.ndk_build_flags,
-            self.abi,
-        )
-        if (failure := self.verify_no_cruft_in_dist(dist_dir, proc.args)) is not None:
-            return failure, []
-        return self.make_build_result(proc), []
-
-
-def _run_ndk_build_test(
-    obj_dir: Path,
-    dist_dir: Path,
-    test_dir: Path,
-    ndk_path: Path,
-    ndk_build_flags: List[str],
-    abi: Abi,
-) -> CompletedProcess[str]:
-    _prep_build_dir(test_dir, obj_dir)
-    with ndk.ext.os.cd(obj_dir):
-        args = [
-            f"APP_ABI={abi}",
-            f"NDK_LIBS_OUT={dist_dir}",
-        ] + _get_jobs_args()
-        return ndk.ndkbuild.build(ndk_path, args + ndk_build_flags)
-
-
-class CMakeBuildTest(BuildTest):
-    def __init__(
-        self,
-        name: str,
-        test_dir: Path,
-        config: BuildConfiguration,
-        ndk_path: Path,
-        dist: bool,
-    ) -> None:
-        super().__init__(name, test_dir, config, ndk_path)
-        self.dist = dist
-
-    def determine_api_level_for_config(self) -> int:
-        return _get_or_infer_app_platform(
-            self.get_overridden_runtime_minsdkversion(),
-            self.test_dir,
-            self.config.abi,
-        )
-
-    def get_dist_dir(self, obj_dir: Path, dist_dir: Path) -> Path:
-        if self.dist:
-            return self.get_build_dir(dist_dir)
-        return self.get_build_dir(obj_dir) / "dist"
-
-    def get_build_dir(self, out_dir: Path) -> Path:
-        return out_dir / str(self.config) / "cmake" / self.name
-
-    def run(
-        self, obj_dir: Path, dist_dir: Path, _test_filters: TestFilter
-    ) -> Tuple[TestResult, List[Test]]:
-        obj_dir = self.get_build_dir(obj_dir)
-        dist_dir = self.get_dist_dir(obj_dir, dist_dir)
-        logger().info("Building test: %s", self.name)
-        assert self.api is not None
-        proc = _run_cmake_build_test(
-            obj_dir,
-            dist_dir,
-            self.test_dir,
-            self.ndk_path,
-            self.cmake_flags,
-            self.abi,
-            self.config.toolchain_file == CMakeToolchainFile.Legacy,
-        )
-        if (failure := self.verify_no_cruft_in_dist(dist_dir, proc.args)) is not None:
-            return failure, []
-        return self.make_build_result(proc), []
-
-
-def _run_cmake_build_test(
-    obj_dir: Path,
-    dist_dir: Path,
-    test_dir: Path,
-    ndk_path: Path,
-    cmake_flags: List[str],
-    abi: str,
-    use_legacy_toolchain_file: bool,
-) -> CompletedProcess[str]:
-    _prep_build_dir(test_dir, obj_dir)
-
-    cmake_bin = find_cmake()
-    ninja_bin = find_ninja()
-
-    toolchain_file = ndk_path / "build" / "cmake" / "android.toolchain.cmake"
-    abi_obj_dir = obj_dir / abi
-    abi_lib_dir = dist_dir / abi
-    args = [
-        f"-H{obj_dir}",
-        f"-B{abi_obj_dir}",
-        f"-DCMAKE_TOOLCHAIN_FILE={toolchain_file}",
-        f"-DANDROID_ABI={abi}",
-        f"-DCMAKE_RUNTIME_OUTPUT_DIRECTORY={abi_lib_dir}",
-        f"-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={abi_lib_dir}",
-        "-GNinja",
-        f"-DCMAKE_MAKE_PROGRAM={ninja_bin}",
-    ]
-    if use_legacy_toolchain_file:
-        args.append("-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=ON")
-    else:
-        args.append("-DANDROID_USE_LEGACY_TOOLCHAIN_FILE=OFF")
-    proc = subprocess.run(
-        [str(cmake_bin)] + args + cmake_flags,
-        check=False,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
-    if proc.returncode != 0:
-        return proc
-    return subprocess.run(
-        [str(cmake_bin), "--build", str(abi_obj_dir), "--"] + _get_jobs_args(),
-        check=False,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        encoding="utf-8",
-    )
diff --git a/ndk/test/buildtest/scanner.py b/ndk/test/buildtest/scanner.py
deleted file mode 100644
index 1582145..0000000
--- a/ndk/test/buildtest/scanner.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import absolute_import
-
-import glob
-from pathlib import Path
-from typing import List, Set
-
-from ndk.test.buildtest.case import (
-    CMakeBuildTest,
-    NdkBuildTest,
-    PythonBuildTest,
-    ShellBuildTest,
-    Test,
-)
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
-
-
-class TestScanner:
-    """Creates a Test objects for a given test directory.
-
-    A test scanner is used to turn a test directory into a list of Tests for
-    any of the test types found in the directory.
-    """
-
-    def find_tests(self, path: Path, name: str) -> List[Test]:
-        """Searches a directory for tests.
-
-        Args:
-            path: Path to the test directory.
-            name: Name of the test.
-
-        Returns: List of Tests, possibly empty.
-        """
-        raise NotImplementedError
-
-
-class BuildTestScanner(TestScanner):
-    def __init__(self, ndk_path: Path, dist: bool = True) -> None:
-        self.ndk_path = ndk_path
-        self.dist = dist
-        self.build_configurations: Set[BuildConfiguration] = set()
-
-    def add_build_configuration(self, spec: BuildConfiguration) -> None:
-        self.build_configurations.add(spec)
-
-    def find_tests(self, path: Path, name: str) -> List[Test]:
-        # If we have a build.sh, that takes precedence over the Android.mk.
-        build_sh_path = path / "build.sh"
-        if build_sh_path.exists():
-            return self.make_build_sh_tests(path, name)
-
-        # Same for test.py
-        test_py_path = path / "test.py"
-        if test_py_path.exists():
-            return self.make_test_py_tests(path, name)
-
-        # But we can have both ndk-build and cmake tests in the same directory.
-        tests: List[Test] = []
-        # NB: This isn't looking for Android.mk specifically (even though on
-        # that would mostly be a better test) because we have a test that
-        # verifies that ndk-build still works when APP_BUILD_SCRIPT is set to
-        # something _other_ than a file named Android.mk.
-        mk_glob = glob.glob(str(path / "jni/*.mk"))
-        if mk_glob:
-            tests.extend(self.make_ndk_build_tests(path, name))
-
-        cmake_lists_path = path / "CMakeLists.txt"
-        if cmake_lists_path.exists():
-            tests.extend(self.make_cmake_tests(path, name))
-        return tests
-
-    def make_build_sh_tests(self, path: Path, name: str) -> List[Test]:
-        return [
-            ShellBuildTest(name, path, config, self.ndk_path)
-            for config in self.build_configurations
-            if config.toolchain_file == CMakeToolchainFile.Default
-        ]
-
-    def make_test_py_tests(self, path: Path, name: str) -> List[Test]:
-        return [
-            PythonBuildTest(name, path, config, self.ndk_path)
-            for config in self.build_configurations
-        ]
-
-    def make_ndk_build_tests(self, path: Path, name: str) -> List[Test]:
-        return [
-            NdkBuildTest(name, path, config, self.ndk_path, self.dist)
-            for config in self.build_configurations
-            if config.toolchain_file == CMakeToolchainFile.Default
-        ]
-
-    def make_cmake_tests(self, path: Path, name: str) -> List[Test]:
-        return [
-            CMakeBuildTest(name, path, config, self.ndk_path, self.dist)
-            for config in self.build_configurations
-        ]
diff --git a/ndk/test/config.py b/ndk/test/config.py
index b033202..922734b 100644
--- a/ndk/test/config.py
+++ b/ndk/test/config.py
@@ -13,13 +13,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-import importlib.util
-from importlib.abc import Loader
-from pathlib import Path
+import imp
+import os
 from types import ModuleType
-from typing import Any, Callable, List, Optional, Tuple, Union
+from typing import Any, List, Optional, Tuple, Union
 
-from ndk.test.devices import DeviceConfig
+from ndk.test.devices import Device
+
 
 # Need to refactor to resolve the circular import between this module and
 # ndk.test.types.
@@ -54,7 +54,8 @@
     class NullTestConfig:
         # pylint: disable=unused-argument
         @staticmethod
-        def build_broken(test: Test) -> Union[Tuple[None, None], Tuple[str, str]]:
+        def build_broken(
+                test: Test) -> Union[Tuple[None, None], Tuple[str, str]]:
             """Tests if a given configuration is known broken.
 
             A broken test is a known failing test that should be fixed.
@@ -75,7 +76,7 @@
             """Tests if a given configuration is unsupported.
 
             An unsupported test is a test that do not make sense to run for a
-            given configuration. Testing x86 assembler on arm, for example.
+            given configuration. Testing x86 assembler on MIPS, for example.
 
             These tests will not be built or run.
 
@@ -114,98 +115,54 @@
                     return None, None
             """
             return False
-
-        @staticmethod
-        def override_runtime_minsdkversion(test: Test) -> int | None:
-            """Overrides the minSdkVersion that will be used for determining OS compat.
-
-            Static executables have the unusual build requirement that they always be
-            built with the latest API level, but are compatible with old devices. We
-            need to specify `APP_PLATFORM := latest` for those tests, but the test
-            runner needs to run them on old devices. There isn't an easy way to infer
-            this, nor are there many static executable tests, so those tests instead
-            override their minSdkVersion rather than letting the test builder infer it
-            from APP_PLATFORM.
-            """
-            return None
-
         # pylint: enable=unused-argument
 
-    def __init__(self, test_config_py: Path) -> None:
+    def __init__(self, file_path: str) -> None:
         # Note that this namespace isn't actually meaningful from our side;
         # it's only what the loaded module's __name__ gets set to.
-        dirname = test_config_py.parent
-        namespace = ".".join([str(dirname), "test_config"])
+        dirname = os.path.dirname(file_path)
+        namespace = '.'.join([dirname, 'test_config'])
 
-        self.module = self.load_module(namespace, test_config_py)
+        try:
+            self.module: Optional[ModuleType] = imp.load_source(
+                namespace, file_path)
+        except IOError:
+            self.module = None
 
         # mypy doesn't understand that the type doesn't matter because we're
         # checking for errors with AttributeError. It doesn't understand
         # hasattr either.
         # https://github.com/python/mypy/issues/1424
         try:
-            self.build_broken: Callable[
-                [Test], Union[tuple[None, None], tuple[str, str]]
-            ] = self.module.build_broken  # type: ignore
+            self.build_broken = self.module.build_broken  # type: ignore
         except AttributeError:
             self.build_broken = self.NullTestConfig.build_broken
 
         try:
-            self.build_unsupported: Callable[
-                [Test], Optional[str]
-            ] = self.module.build_unsupported  # type: ignore
+            self.build_unsupported = self.module.build_unsupported  # type: ignore
         except AttributeError:
             self.build_unsupported = self.NullTestConfig.build_unsupported
 
         try:
-            self.extra_cmake_flags: Callable[
-                [], list[str]
-            ] = self.module.extra_cmake_flags  # type: ignore
+            self.extra_cmake_flags = self.module.extra_cmake_flags  # type: ignore
         except AttributeError:
             self.extra_cmake_flags = self.NullTestConfig.extra_cmake_flags
 
         try:
-            self.extra_ndk_build_flags: Callable[
-                [], list[str]
-            ] = self.module.extra_ndk_build_flags  # type: ignore
+            self.extra_ndk_build_flags = self.module.extra_ndk_build_flags  # type: ignore
         except AttributeError:
             ntc = self.NullTestConfig
             self.extra_ndk_build_flags = ntc.extra_ndk_build_flags
 
         try:
-            self.is_negative_test: Callable[
-                [], bool
-            ] = self.module.is_negative_test  # type: ignore
+            self.is_negative_test = self.module.is_negative_test  # type: ignore
         except AttributeError:
             self.is_negative_test = self.NullTestConfig.is_negative_test
 
-        try:
-            self.override_runtime_minsdkversion: Callable[
-                [Test], int | None
-            ] = self.module.override_runtime_minsdkversion  # type: ignore
-        except AttributeError:
-            self.override_runtime_minsdkversion = (
-                self.NullTestConfig.override_runtime_minsdkversion
-            )
-
     @classmethod
-    def from_test_dir(cls, test_dir: Path) -> "TestConfig":
-        return cls(test_dir / "test_config.py")
-
-    @staticmethod
-    def load_module(namespace: str, path: Path) -> Optional[ModuleType]:
-        if not path.exists():
-            return None
-
-        # https://stackoverflow.com/a/67692/632035
-        spec = importlib.util.spec_from_file_location(namespace, path)
-        if spec is None or spec.loader is None:
-            raise RuntimeError(f"Could not import {path}")
-        module = importlib.util.module_from_spec(spec)
-        # https://github.com/python/typeshed/issues/2793
-        assert isinstance(spec.loader, Loader)
-        spec.loader.exec_module(module)
-        return module
+    def from_test_dir(cls, test_dir: str) -> 'TestConfig':
+        path = os.path.join(test_dir, 'test_config.py')
+        return cls(path)
 
 
 class DeviceTestConfig(TestConfig):
@@ -214,51 +171,73 @@
     We need to mark some tests as broken or unsupported based on what device
     they are running on, as opposed to just what they were built for.
     """
-
     class NullTestConfig(TestConfig.NullTestConfig):
         # pylint: disable=unused-argument
         @staticmethod
-        def run_broken(
-            test: Test, device: DeviceConfig
-        ) -> Union[Tuple[None, None], Tuple[str, str]]:
+        def run_broken(test: Test, device: Device
+                       ) -> Union[Tuple[None, None], Tuple[str, str]]:
             return None, None
 
         @staticmethod
-        def run_unsupported(test: Test, device: DeviceConfig) -> Optional[str]:
+        def run_unsupported(test: Test, device: Device) -> Optional[str]:
             return None
 
         @staticmethod
         def extra_cmake_flags() -> List[str]:
             return []
-
         # pylint: enable=unused-argument
 
-    def __init__(self, test_config_py: Path) -> None:
-        super().__init__(test_config_py)
+    def __init__(self, file_path: str) -> None:
+        super().__init__(file_path)
 
         try:
-            self.run_broken: Callable[
-                [Test, DeviceConfig], Union[tuple[None, None], tuple[str, str]]
-            ] = self.module.run_broken  # type: ignore
+            self.run_broken = self.module.run_broken  # type: ignore
         except AttributeError:
             self.run_broken = self.NullTestConfig.run_broken
 
         try:
-            self.run_unsupported: Callable[
-                [Test, DeviceConfig], Optional[str]
-            ] = self.module.run_unsupported  # type: ignore
+            self.run_unsupported = self.module.run_unsupported  # type: ignore
         except AttributeError:
             self.run_unsupported = self.NullTestConfig.run_unsupported
 
-        if hasattr(self.module, "is_negative_test"):
+        if hasattr(self.module, 'is_negative_test'):
             # If the build is expected to fail, then it should just be a build
             # test since the test should never be run.
             #
             # If the run is expected to fail, just fix the test to pass for
             # thatr case. Gtest death tests can handle the more complicated
             # cases.
-            raise RuntimeError("is_negative_test is invalid for device tests")
+            raise RuntimeError('is_negative_test is invalid for device tests')
 
     @classmethod
-    def from_test_dir(cls, test_dir: Path) -> "DeviceTestConfig":
-        return cls(test_dir / "test_config.py")
+    def from_test_dir(cls, test_dir: str) -> 'DeviceTestConfig':
+        path = os.path.join(test_dir, 'test_config.py')
+        return cls(path)
+
+
+class LibcxxTestConfig(DeviceTestConfig):
+    """Specialization of test_config.py for libc++.
+
+    The libc++ tests have multiple tests in a single directory, so we need to
+    pass the test name for build_broken too.
+    """
+    class NullTestConfig(TestConfig.NullTestConfig):
+        # pylint: disable=unused-argument,arguments-differ
+        @staticmethod
+        def build_unsupported(test: Test) -> Optional[str]:
+            return None
+
+        @staticmethod
+        def build_broken(
+                test: Test) -> Union[Tuple[None, None], Tuple[str, str]]:
+            return None, None
+
+        @staticmethod
+        def run_unsupported(test: Test, device: Device) -> Optional[str]:
+            return None
+
+        @staticmethod
+        def run_broken(test: Test, device: Device
+                       ) -> Union[Tuple[None, None], Tuple[str, str]]:
+            return None, None
+        # pylint: enable=unused-argument,arguments-differ
diff --git a/ndk/test/devices.py b/ndk/test/devices.py
index 7074649..96381ef 100644
--- a/ndk/test/devices.py
+++ b/ndk/test/devices.py
@@ -14,19 +14,17 @@
 # limitations under the License.
 #
 """Device wrappers and device fleet management."""
-from __future__ import annotations
+from __future__ import print_function
 
 import logging
-import os
 import re
 import shutil
 import subprocess
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Dict, List, Optional, Set
+from typing import Any, Dict, List, Optional, Set
 
-import ndk.paths
 from ndk.abis import Abi
+import ndk.ext.shutil
+import ndk.paths
 from ndk.test.spec import BuildConfiguration
 from ndk.workqueue import ShardingGroup, Worker, WorkQueue
 
@@ -34,8 +32,7 @@
     import adb  # pylint: disable=import-error
 except ImportError:
     import site
-
-    site.addsitedir(str(ndk.paths.android_path("development/python-packages/adb")))
+    site.addsitedir(ndk.paths.android_path('development/python-packages'))
     import adb  # pylint: disable=import-error,ungrouped-imports
 
 
@@ -44,31 +41,8 @@
     return logging.getLogger(__name__)
 
 
-@dataclass(frozen=True)
-class DeviceConfig:
-    abis: list[Abi]
-    version: int
-    supports_mte: bool
-
-    def can_run_build_config(self, config: BuildConfiguration) -> bool:
-        assert config.api is not None
-        if self.version < config.api:
-            # Device is too old for this test.
-            return False
-
-        if config.abi not in self.abis:
-            return False
-
-        return True
-
-
 class Device(adb.AndroidDevice):
     """A device to be used for testing."""
-
-    # We have no type information for the adb module so mypy can't reason about
-    # it. At least let it know that there's a serial property that is a string.
-    serial: str
-
     # pylint: disable=no-member
     def __init__(self, serial: str, precache: bool = False) -> None:
         super().__init__(serial)
@@ -78,51 +52,40 @@
         self._ro_build_id: Optional[str] = None
         self._ro_build_version_sdk: Optional[str] = None
         self._ro_build_version_codename: Optional[str] = None
-        self._ro_debuggable: Optional[str] = None
+        self._ro_debuggable: Optional[bool] = None
         self._ro_product_name: Optional[str] = None
-        self._supports_mte: bool = False
 
         if precache:
             self.cache_properties()
 
-    def config(self) -> DeviceConfig:
-        return DeviceConfig(self.abis, self.version, self.supports_mte)
-
     def cache_properties(self) -> None:
         """Caches the device's system properties."""
         if not self._did_cache:
-            self._ro_build_characteristics = self.get_prop("ro.build.characteristics")
-            self._ro_build_id = self.get_prop("ro.build.id")
-            self._ro_build_version_sdk = self.get_prop("ro.build.version.sdk")
-            self._ro_build_version_codename = self.get_prop("ro.build.version.codename")
-            self._ro_debuggable = self.get_prop("ro.debuggable")
-            self._ro_product_name = self.get_prop("ro.product.name")
+            self._ro_build_characteristics = self.get_prop(
+                'ro.build.characteristics')
+            self._ro_build_id = self.get_prop('ro.build.id')
+            self._ro_build_version_sdk = self.get_prop('ro.build.version.sdk')
+            self._ro_build_version_codename = self.get_prop(
+                'ro.build.version.codename')
+            self._ro_debuggable = self.get_prop('ro.debuggable')
+            self._ro_product_name = self.get_prop('ro.product.name')
             self._did_cache = True
 
             # 64-bit devices list their ABIs differently than 32-bit devices.
             # Check all the possible places for stashing ABI info and merge
             # them.
             abi_properties = [
-                "ro.product.cpu.abi",
-                "ro.product.cpu.abi2",
-                "ro.product.cpu.abilist",
+                'ro.product.cpu.abi',
+                'ro.product.cpu.abi2',
+                'ro.product.cpu.abilist',
             ]
             abis: Set[Abi] = set()
             for abi_prop in abi_properties:
                 value = self.get_prop(abi_prop)
                 if value is not None:
-                    abis.update([Abi(s) for s in value.split(",")])
-
-            if "x86_64" in abis:
-                # Don't allow ndk_translation to count as an arm test device.
-                # We need to verify that things work on actual Arm, not that
-                # they work when binary translated for x86.
-                abis.difference_update({"arm64-v8a", "armeabi-v7a"})
+                    abis.update(value.split(','))
 
             self._cached_abis = sorted(list(abis))
-            self._supports_mte = (
-                self.shell_nocheck(["grep", " mte", "/proc/cpuinfo"])[0] == 0
-            )
 
     @property
     def name(self) -> str:
@@ -153,13 +116,13 @@
     def is_release(self) -> bool:
         self.cache_properties()
         codename = self._ro_build_version_codename
-        return codename == "REL"
+        return codename == 'REL'
 
     @property
     def is_emulator(self) -> bool:
         self.cache_properties()
         chars = self._ro_build_characteristics
-        return chars == "emulator"
+        return chars == 'emulator'
 
     @property
     def is_debuggable(self) -> bool:
@@ -168,20 +131,24 @@
         return int(self._ro_debuggable) != 0
 
     def can_run_build_config(self, config: BuildConfiguration) -> bool:
-        return self.config().can_run_build_config(config)
+        assert config.api is not None
+        if self.version < config.api:
+            # Device is too old for this test.
+            return False
+
+        if config.abi not in self.abis:
+            return False
+
+        return True
 
     @property
     def supports_pie(self) -> bool:
         return self.version >= 16
 
-    @property
-    def supports_mte(self) -> bool:
-        self.cache_properties()
-        assert self._supports_mte is not None
-        return self._supports_mte
-
     def __str__(self) -> str:
-        return f"android-{self.version} {self.name} {self.serial} {self.build_id}"
+        return (
+            f'android-{self.version} {self.name} {self.serial} {self.build_id}'
+        )
 
     def __eq__(self, other: object) -> bool:
         assert isinstance(other, Device)
@@ -191,53 +158,30 @@
         return hash(self.serial)
 
 
-class DeviceShardingGroup(ShardingGroup[Device]):
+class DeviceShardingGroup(ShardingGroup):
     """A collection of devices that should be identical for testing purposes.
 
     For the moment, devices are only identical for testing purposes if they are
     the same hardware running the same build.
     """
-
-    def __init__(
-        self,
-        devices: list[Device],
-        abis: list[Abi],
-        version: int,
-        is_emulator: bool,
-        is_release: bool,
-        is_debuggable: bool,
-        supports_mte: bool,
-    ) -> None:
-        self.devices = devices
-        self.abis = abis
-        self.version = version
-        self.is_emulator = is_emulator
-        self.is_release = is_release
-        self.is_debuggable = is_debuggable
-        self.supports_mte = supports_mte
-
-    @classmethod
-    def with_first_device(cls, first_device: Device) -> DeviceShardingGroup:
-        return DeviceShardingGroup(
-            [first_device],
-            sorted(first_device.abis),
-            first_device.version,
-            first_device.is_emulator,
-            first_device.is_release,
-            first_device.is_debuggable,
-            first_device.supports_mte,
-        )
+    def __init__(self, first_device: Device) -> None:
+        self.devices = [first_device]
+        self.abis = sorted(first_device.abis)
+        self.version = first_device.version
+        self.is_emulator = first_device.is_emulator
+        self.is_release = first_device.is_release
+        self.is_debuggable = first_device.is_debuggable
 
     def __str__(self) -> str:
         return f'android-{self.version} {" ".join(self.abis)}'
 
     @property
-    def shards(self) -> list[Device]:
+    def shards(self) -> List[Any]:
         return self.devices
 
     def add_device(self, device: Device) -> None:
         if not self.device_matches(device):
-            raise ValueError(f"{device} does not match this device group.")
+            raise ValueError(f'{device} does not match this device group.')
 
         self.devices.append(device)
 
@@ -252,8 +196,6 @@
             return False
         if self.is_debuggable != device.is_debuggable:
             return False
-        if self.supports_mte != device.supports_mte:
-            return False
         return True
 
     def __eq__(self, other: object) -> bool:
@@ -268,30 +210,24 @@
             return False
         if self.is_debuggable != other.is_debuggable:
             return False
-        if self.supports_mte != other.supports_mte:
-            return False
         if self.devices != other.devices:
-            print("devices not equal: {}, {}".format(self.devices, other.devices))
+            print('devices not equal: {}, {}'.format(
+                self.devices, other.devices))
             return False
         return True
 
+    def __lt__(self, other: object) -> bool:
+        assert isinstance(other, DeviceShardingGroup)
+        return (self.version, self.abis) < (other.version, other.abis)
+
     def __hash__(self) -> int:
-        return hash(
-            (
-                self.version,
-                self.is_emulator,
-                self.is_release,
-                self.is_debuggable,
-                tuple(self.abis),
-                tuple(self.devices),
-                self.supports_mte,
-            )
-        )
+        return hash((
+            self.version, self.is_emulator, self.is_release,
+            self.is_debuggable, tuple(self.abis), tuple(self.devices)))
 
 
 class DeviceFleet:
     """A collection of devices that can be used for testing."""
-
     def __init__(self, test_configurations: Dict[int, List[Abi]]) -> None:
         """Initializes a device fleet.
 
@@ -311,7 +247,7 @@
     def add_device(self, device: Device) -> None:
         """Fills a fleet device slot with a device, if appropriate."""
         if device.version not in self.devices:
-            logger().info("Ignoring device for unwanted API level: %s", device)
+            logger().info('Ignoring device for unwanted API level: %s', device)
             return
 
         same_version = self.devices[device.version]
@@ -321,14 +257,12 @@
                 continue
 
             # Never houdini.
-            if abi.startswith("armeabi") and "x86" in device.abis:
+            if abi.startswith('armeabi') and 'x86' in device.abis:
                 continue
 
             # Anything is better than nothing.
             if current_group is None:
-                self.devices[device.version][abi] = (
-                    DeviceShardingGroup.with_first_device(device)
-                )
+                self.devices[device.version][abi] = DeviceShardingGroup(device)
                 continue
 
             if current_group.device_matches(device):
@@ -338,22 +272,12 @@
             # The emulator images have actually been changed over time, so the
             # devices are more trustworthy.
             if current_group.is_emulator and not device.is_emulator:
-                self.devices[device.version][abi] = (
-                    DeviceShardingGroup.with_first_device(device)
-                )
+                self.devices[device.version][abi] = DeviceShardingGroup(device)
 
             # Trust release builds over pre-release builds, but don't block
             # pre-release because sometimes that's all there is.
             if not current_group.is_release and device.is_release:
-                self.devices[device.version][abi] = (
-                    DeviceShardingGroup.with_first_device(device)
-                )
-
-            # If we have a device that supports MTE, prefer that.
-            if not current_group.supports_mte and device.supports_mte:
-                self.devices[device.version][abi] = (
-                    DeviceShardingGroup.with_first_device(device)
-                )
+                self.devices[device.version][abi] = DeviceShardingGroup(device)
 
     def get_unique_device_groups(self) -> Set[DeviceShardingGroup]:
         groups = set()
@@ -364,7 +288,8 @@
                     groups.add(group)
         return groups
 
-    def get_device_group(self, version: int, abi: Abi) -> Optional[DeviceShardingGroup]:
+    def get_device_group(self, version: int,
+                         abi: Abi) -> Optional[DeviceShardingGroup]:
         """Returns the device group associated with the given API and ABI."""
         if version not in self.devices:
             return None
@@ -372,23 +297,13 @@
             return None
         return self.devices[version][abi]
 
-    def get_missing(self) -> list[DeviceShardingGroup]:
-        """Describes desired configurations without available devices."""
+    def get_missing(self) -> List[str]:
+        """Describes desired configurations without available devices."""
         missing = []
         for version, abis in self.devices.items():
             for abi, group in abis.items():
                 if group is None:
-                    missing.append(
-                        DeviceShardingGroup(
-                            [],
-                            [abi],
-                            version,
-                            is_emulator=False,
-                            is_release=True,
-                            is_debuggable=False,
-                            supports_mte=False,
-                        )
-                    )
+                    missing.append(f'android-{version} {abi}')
         return missing
 
     def get_versions(self) -> List[int]:
@@ -406,31 +321,31 @@
 
 def get_all_attached_devices(workqueue: WorkQueue) -> List[Device]:
     """Returns a list of all connected devices."""
-    if shutil.which("adb") is None:
-        raise RuntimeError("Could not find adb.")
+    if shutil.which('adb') is None:
+        raise RuntimeError('Could not find adb.')
 
     # We could get the device name from `adb devices -l`, but we need to
     # getprop to find other details anyway, and older devices don't report
     # their names properly (nakasi on android-16, for example).
-    p = subprocess.run(
-        ["adb", "devices"], check=True, stdout=subprocess.PIPE, encoding="utf-8"
-    )
+    p = subprocess.Popen(['adb', 'devices'], stdout=subprocess.PIPE)
+    out, _ = p.communicate()
+    out = out.decode('utf-8')
     if p.returncode != 0:
-        raise RuntimeError("Failed to get list of devices from adb.")
+        raise RuntimeError('Failed to get list of devices from adb.')
 
     # The first line of `adb devices` just says "List of attached devices", so
     # skip that.
-    for line in p.stdout.split("\n")[1:]:
+    for line in out.split('\n')[1:]:
         if not line.strip():
             continue
 
-        serial, _ = re.split(r"\s+", line, maxsplit=1)
+        serial, _ = re.split(r'\s+', line, maxsplit=1)
 
-        if "offline" in line:
-            logger().info("Ignoring offline device: %s", serial)
+        if 'offline' in line:
+            logger().info('Ignoring offline device: %s', serial)
             continue
-        if "unauthorized" in line:
-            logger().info("Ignoring unauthorized device: %s", serial)
+        if 'unauthorized' in line:
+            logger().info('Ignoring unauthorized device: %s', serial)
             continue
 
         # Caching all the device details via getprop can actually take quite a
@@ -440,24 +355,14 @@
     devices = []
     while not workqueue.finished():
         device = workqueue.get_result()
-        logger().info("Found device %s", device)
+        logger().info('Found device %s', device)
         devices.append(device)
 
     return devices
 
 
-def exclude_device(device: Device) -> bool:
-    """Returns True if a device should be excluded from the fleet."""
-    exclusion_list_env = os.getenv("NDK_DEVICE_EXCLUSION_LIST")
-    if exclusion_list_env is None:
-        return False
-    exclusion_list = Path(exclusion_list_env).read_text(encoding="utf-8").splitlines()
-    return device.serial in exclusion_list
-
-
-def find_devices(
-    sought_devices: Dict[int, List[Abi]], workqueue: WorkQueue
-) -> DeviceFleet:
+def find_devices(sought_devices: Dict[int, List[Abi]],
+                 workqueue: WorkQueue) -> DeviceFleet:
     """Detects connected devices and returns a set for testing.
 
     We get a list of devices by scanning the output of `adb devices` and
@@ -466,7 +371,6 @@
     """
     fleet = DeviceFleet(sought_devices)
     for device in get_all_attached_devices(workqueue):
-        if not exclude_device(device):
-            fleet.add_device(device)
+        fleet.add_device(device)
 
     return fleet
diff --git a/ndk/test/devicetest/__init__.py b/ndk/test/devicetest/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/ndk/test/devicetest/__init__.py
+++ /dev/null
diff --git a/ndk/test/devicetest/case.py b/ndk/test/devicetest/case.py
deleted file mode 100644
index 0347a64..0000000
--- a/ndk/test/devicetest/case.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import logging
-import shlex
-import traceback
-from pathlib import Path, PurePosixPath
-from typing import Optional, Tuple, Union
-
-from ndk.test.config import DeviceTestConfig
-from ndk.test.devices import Device, DeviceConfig
-from ndk.test.spec import BuildConfiguration
-
-AdbResult = tuple[int, str, str, str]
-
-
-def logger() -> logging.Logger:
-    """Returns the module logger."""
-    return logging.getLogger(__name__)
-
-
-def shell_nocheck_wrap_errors(device: Device, cmd: str) -> AdbResult:
-    """Invokes device.shell_nocheck and wraps exceptions as failed commands."""
-    repro_cmd = f"adb -s {device.serial} shell {shlex.quote(cmd)}"
-    try:
-        rc, stdout, stderr = device.shell_nocheck([cmd])
-        return rc, stdout, stderr, repro_cmd
-    except RuntimeError:
-        return 1, cmd, traceback.format_exc(), repro_cmd
-
-
-# TODO: Extract a common interface from this and ndk.test.case.build.Test for the
-# printer.
-class TestCase:
-    """A device test case found in the dist directory.
-
-    The test directory is structured as tests/dist/$CONFIG/$BUILD_SYSTEM/...
-    What follows depends on the type of test case. Each discovered test case
-    will have a name, a build configuration, a build system, and a device
-    directory.
-    """
-
-    def __init__(
-        self,
-        name: str,
-        test_src_dir: Path,
-        config: BuildConfiguration,
-        build_system: str,
-        device_dir: PurePosixPath,
-    ) -> None:
-        self.name = name
-        self.test_src_dir = test_src_dir
-        self.config = config
-        self.build_system = build_system
-        self.device_dir = device_dir
-
-    def check_unsupported(self, device: DeviceConfig) -> Optional[str]:
-        raise NotImplementedError
-
-    def check_broken(
-        self, device: DeviceConfig
-    ) -> Union[Tuple[None, None], Tuple[str, str]]:
-        raise NotImplementedError
-
-    def run(self, device: Device) -> AdbResult:
-        logger().info('%s: shell_nocheck "%s"', device.name, self.cmd)
-        return shell_nocheck_wrap_errors(device, self.cmd)
-
-    @property
-    def cmd(self) -> str:
-        """The shell command to run on the device to execute the test case."""
-        raise NotImplementedError
-
-    @property
-    def negated_cmd(self) -> str:
-        """The command to execute the test case, but with the exit code flipped."""
-        return f"! ( {self.cmd} )"
-
-    def __str__(self) -> str:
-        return f"{self.name} [{self.config}]"
-
-
-class BasicTestCase(TestCase):
-    """A test case for the standard NDK test builder.
-
-    These tests were written specifically for the NDK and thus follow the
-    layout we expect. In each test configuration directory, we have
-    $TEST_SUITE/$ABI/$TEST_FILES. $TEST_FILES includes both the shared
-    libraries for the test and the test executables.
-    """
-
-    def __init__(
-        self,
-        suite: str,
-        executable: str,
-        test_src_dir: Path,
-        config: BuildConfiguration,
-        build_system: str,
-        device_dir: PurePosixPath,
-    ) -> None:
-        name = ".".join([suite, executable])
-        super().__init__(name, test_src_dir, config, build_system, device_dir)
-
-        self.suite = suite
-        self.executable = executable
-
-    def get_test_config(self) -> DeviceTestConfig:
-        # We don't run anything in tests/build. We can safely assume that anything here
-        # is in tests/device.
-        test_dir = self.test_src_dir / "device" / self.suite
-        return DeviceTestConfig.from_test_dir(test_dir)
-
-    def check_unsupported(self, device: DeviceConfig) -> Optional[str]:
-        return self.get_test_config().run_unsupported(self, device)
-
-    def check_broken(
-        self, device: DeviceConfig
-    ) -> Union[Tuple[None, None], Tuple[str, str]]:
-        return self.get_test_config().run_broken(self, device)
-
-    @property
-    def cmd(self) -> str:
-        return "cd {} && LD_LIBRARY_PATH={} ./{} 2>&1".format(
-            self.device_dir, self.device_dir, self.executable
-        )
diff --git a/ndk/test/devicetest/scanner.py b/ndk/test/devicetest/scanner.py
deleted file mode 100644
index 67a30f9..0000000
--- a/ndk/test/devicetest/scanner.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import logging
-import os
-from pathlib import Path, PurePosixPath
-from typing import Callable, Dict, List
-
-from ndk.test.devicetest.case import BasicTestCase, TestCase
-from ndk.test.filters import TestFilter
-from ndk.test.spec import BuildConfiguration, TestSpec
-
-
-def logger() -> logging.Logger:
-    """Returns the module logger."""
-    return logging.getLogger(__name__)
-
-
-def _enumerate_basic_tests(
-    out_dir_base: Path,
-    test_src_dir: Path,
-    device_base_dir: PurePosixPath,
-    build_cfg: BuildConfiguration,
-    build_system: str,
-    test_filter: TestFilter,
-) -> List[TestCase]:
-    tests: List[TestCase] = []
-    tests_dir = out_dir_base / str(build_cfg) / build_system
-    if not tests_dir.exists():
-        return tests
-
-    for test_subdir in os.listdir(tests_dir):
-        test_dir = tests_dir / test_subdir
-        out_dir = test_dir / build_cfg.abi
-        test_relpath = out_dir.relative_to(out_dir_base)
-        device_dir = device_base_dir / test_relpath
-        for test_file in os.listdir(out_dir):
-            if test_file.endswith(".so"):
-                continue
-            if test_file.endswith(".sh"):
-                continue
-            if test_file.endswith(".a"):
-                test_path = out_dir / test_file
-                logger().error(
-                    "Found static library in app install directory. Static "
-                    "libraries should never be installed. This is a bug in "
-                    "the build system: %s",
-                    test_path,
-                )
-                continue
-            name = ".".join([test_subdir, test_file])
-            if not test_filter.filter(name):
-                continue
-            tests.append(
-                BasicTestCase(
-                    test_subdir,
-                    test_file,
-                    test_src_dir,
-                    build_cfg,
-                    build_system,
-                    device_dir,
-                )
-            )
-    return tests
-
-
-class ConfigFilter:
-    def __init__(self, test_spec: TestSpec) -> None:
-        self.spec = test_spec
-
-    def filter(self, build_config: BuildConfiguration) -> bool:
-        return build_config.abi in self.spec.abis
-
-
-def enumerate_tests(
-    test_dir: Path,
-    test_src_dir: Path,
-    device_base_dir: PurePosixPath,
-    test_filter: TestFilter,
-    config_filter: ConfigFilter,
-) -> Dict[BuildConfiguration, List[TestCase]]:
-    tests: Dict[BuildConfiguration, List[TestCase]] = {}
-
-    # The tests directory has a directory for each type of test. For example:
-    #
-    #  * build.sh
-    #  * cmake
-    #  * ndk-build
-    #  * test.py
-    #
-    # We need to handle some of these differently. The test.py and build.sh
-    # type tests are build only, so we don't need to run them.
-    test_subdir_class_map: Dict[
-        str,
-        Callable[
-            [Path, Path, PurePosixPath, BuildConfiguration, str, TestFilter],
-            List[TestCase],
-        ],
-    ] = {
-        "cmake": _enumerate_basic_tests,
-        "ndk-build": _enumerate_basic_tests,
-    }
-
-    for build_cfg_str in os.listdir(test_dir):
-        # Ignore TradeFed config files.
-        if not (test_dir / build_cfg_str).is_dir():
-            continue
-        build_cfg = BuildConfiguration.from_string(build_cfg_str)
-        if not config_filter.filter(build_cfg):
-            continue
-
-        if build_cfg not in tests:
-            tests[build_cfg] = []
-
-        for test_type, scan_for_tests in test_subdir_class_map.items():
-            tests[build_cfg].extend(
-                scan_for_tests(
-                    test_dir,
-                    test_src_dir,
-                    device_base_dir,
-                    build_cfg,
-                    test_type,
-                    test_filter,
-                )
-            )
-
-    return tests
diff --git a/ndk/test/filters.py b/ndk/test/filters.py
index 332b044..af0529f 100644
--- a/ndk/test/filters.py
+++ b/ndk/test/filters.py
@@ -26,9 +26,6 @@
 
 
 class TestFilter:
-    # Needed to shut up warnings about `Test*` looking like a unittest test case.
-    __test__ = False
-
     def __init__(self, patterns: List[str]) -> None:
         self.early_filters: List[FilterFunc] = []
         self.late_filters: List[FilterFunc] = []
@@ -37,7 +34,7 @@
 
     def filter(self, test_name: str) -> bool:
         filter_set = self.early_filters
-        if "." in test_name:
+        if '.' in test_name:
             filter_set = self.late_filters
         if not filter_set:
             return True
@@ -75,10 +72,10 @@
         # Note that the way we split the patterns does allow more than one '.'
         # to appear in the full test name. The early pattern will never contain
         # a '.', i.e. the early filter pattern for 'foo.bar.*' is 'foo'.
-        early_pattern = pattern.split(".")[0]
+        early_pattern = pattern.split('.')[0]
         late_pattern = pattern
-        if "." not in pattern:
-            late_pattern = pattern + ".*"
+        if '.' not in pattern:
+            late_pattern = pattern + '.*'
 
         self._add_early_filter(early_pattern)
         self._add_late_filter(late_pattern)
@@ -90,5 +87,5 @@
         self.late_filters.append(FilterFunc(pattern))
 
     @classmethod
-    def from_string(cls, filter_string: Optional[str]) -> "TestFilter":
-        return cls(filter_string.split(",") if filter_string else [])
+    def from_string(cls, filter_string: Optional[str]) -> 'TestFilter':
+        return cls(filter_string.split(',') if filter_string else [])
diff --git a/ndk/test/printers.py b/ndk/test/printers.py
index 2205d9d..922e4bd 100644
--- a/ndk/test/printers.py
+++ b/ndk/test/printers.py
@@ -17,110 +17,69 @@
 
 import os
 import sys
-from typing import Any, Optional, TextIO
+from typing import Optional, TextIO
 
 import ndk.termcolor
 from ndk.test.report import Report
-from ndk.test.result import ResultTranslations, TestResult
+from ndk.test.result import TestResult
 
 
-def format_stats_str(
-    report: Report[Any], tr: ResultTranslations, use_color: bool
-) -> str:
-    pass_label = ndk.termcolor.maybe_color(tr.success, "green", use_color)
-    fail_label = ndk.termcolor.maybe_color(tr.failure, "red", use_color)
-    skip_label = ndk.termcolor.maybe_color(tr.skip, "yellow", use_color)
-    return "{pl} {p}/{t} {fl} {f}/{t} {sl} {s}/{t}".format(
-        pl=pass_label,
-        p=report.num_passed,
-        fl=fail_label,
-        f=report.num_failed,
-        sl=skip_label,
-        s=report.num_skipped,
-        t=report.num_tests,
-    )
+def format_stats_str(report: Report, use_color: bool) -> str:
+    pass_label = ndk.termcolor.maybe_color('PASS', 'green', use_color)
+    fail_label = ndk.termcolor.maybe_color('FAIL', 'red', use_color)
+    skip_label = ndk.termcolor.maybe_color('SKIP', 'yellow', use_color)
+    return '{pl} {p}/{t} {fl} {f}/{t} {sl} {s}/{t}'.format(
+        pl=pass_label, p=report.num_passed,
+        fl=fail_label, f=report.num_failed,
+        sl=skip_label, s=report.num_skipped,
+        t=report.num_tests)
 
 
 class Printer:
     def print_result(self, result: TestResult) -> None:
         raise NotImplementedError
 
-    def print_summary(self, report: Report[Any]) -> None:
+    def print_summary(self, report: Report) -> None:
         raise NotImplementedError
 
 
 class FilePrinter(Printer):
-    def __init__(
-        self,
-        to_file: TextIO,
-        use_color: Optional[bool] = None,
-        show_all: bool = False,
-        quiet: bool = False,
-        result_translations: ResultTranslations = ResultTranslations(),
-    ) -> None:
+    def __init__(self,
+                 to_file: TextIO,
+                 use_color: Optional[bool] = None,
+                 show_all: bool = False,
+                 quiet: bool = False) -> None:
         self.file = to_file
         self.show_all = show_all
         self.quiet = quiet
-        self.result_translations = result_translations
 
         if use_color is None:
-            self.use_color = to_file.isatty() and os.name != "nt"
+            self.use_color = to_file.isatty() and os.name != 'nt'
         else:
             self.use_color = use_color
 
     def print_result(self, result: TestResult) -> None:
         if self.quiet and not result.failed():
             return
-        print(
-            result.to_string(self.result_translations, colored=self.use_color),
-            file=self.file,
-        )
+        print(result.to_string(colored=self.use_color), file=self.file)
 
-    def print_summary(self, report: Report[Any]) -> None:
-        if not report.num_tests:
-            print(
-                ndk.termcolor.maybe_color(
-                    "No tests were run. This usually means you do not have any devices "
-                    "required for this test connected. Check for warnings above about "
-                    "unavailable devices.",
-                    "red",
-                    self.use_color,
-                ),
-                file=self.file,
-            )
-            return
-
+    def print_summary(self, report: Report) -> None:
+        print(file=self.file)
+        formatted = format_stats_str(report, self.use_color)
+        print(formatted, file=self.file)
         for suite, suite_report in report.by_suite().items():
-            stats_str = format_stats_str(
-                suite_report, self.result_translations, self.use_color
-            )
+            stats_str = format_stats_str(suite_report, self.use_color)
             print(file=self.file)
-            print("{}: {}".format(suite, stats_str), file=self.file)
+            print('{}: {}'.format(suite, stats_str), file=self.file)
             for test_report in suite_report.reports:
                 if self.show_all or test_report.result.failed():
-                    print(
-                        test_report.result.to_string(
-                            self.result_translations, colored=self.use_color
-                        ),
-                        file=self.file,
-                    )
-
-        formatted = format_stats_str(report, self.result_translations, self.use_color)
-        print(f"\nTotal: {formatted}", file=self.file)
+                    print(test_report.result.to_string(colored=self.use_color),
+                          file=self.file)
 
 
 class StdoutPrinter(FilePrinter):
-    def __init__(
-        self,
-        use_color: Optional[bool] = None,
-        show_all: bool = False,
-        quiet: bool = False,
-        result_translations: ResultTranslations = ResultTranslations(),
-    ) -> None:
-        super().__init__(
-            sys.stdout,
-            use_color,
-            show_all,
-            quiet,
-            result_translations=result_translations,
-        )
+    def __init__(self,
+                 use_color: Optional[bool] = None,
+                 show_all: bool = False,
+                 quiet: bool = False) -> None:
+        super().__init__(sys.stdout, use_color, show_all, quiet)
diff --git a/ndk/test/report.py b/ndk/test/report.py
index a143072..66db993 100644
--- a/ndk/test/report.py
+++ b/ndk/test/report.py
@@ -14,40 +14,32 @@
 # limitations under the License.
 #
 """Defines the format of test results from the test runner."""
-from __future__ import annotations
+from typing import Callable, Dict, List
 
-from collections.abc import Iterator
-from typing import Callable, Dict, Generic, List, TypeVar, cast
-
-from ndk.test.result import Failure, TestResult
+from ndk.test.result import TestResult
 
 
 class SingleResultReport:
     """Stores the result of a single test with its config info."""
-
     def __init__(self, suite: str, result: TestResult) -> None:
         self.suite = suite
         self.result = result
 
 
-UserDataT = TypeVar("UserDataT")
-
-
-class Report(Generic[UserDataT]):
+class Report:
     """Stores details of a test run.
 
     A "test run" means any number of tests run in any number of (unique)
     configurations.
     """
-
     def __init__(self) -> None:
         self.reports: List[SingleResultReport] = []
 
     def add_result(self, suite: str, result: TestResult) -> None:
         self.reports.append(SingleResultReport(suite, result))
 
-    def by_suite(self) -> Dict[str, Report[UserDataT]]:
-        suite_reports: Dict[str, Report[UserDataT]] = {}
+    def by_suite(self) -> Dict[str, 'Report']:
+        suite_reports: Dict[str, 'Report'] = {}
         for report in self.reports:
             if report.suite not in suite_reports:
                 suite_reports[report.suite] = Report()
@@ -56,7 +48,7 @@
 
     @property
     def successful(self) -> bool:
-        return self.num_failed == 0 and self.num_passed > 0
+        return self.num_failed == 0
 
     @property
     def num_tests(self) -> int:
@@ -74,14 +66,13 @@
     def num_skipped(self) -> int:
         return len(self.all_skipped)
 
-    def iter_failed(self) -> Iterator[SingleResultReport]:
-        for report in self.reports:
-            if report.result.failed():
-                yield report
-
     @property
     def all_failed(self) -> List[SingleResultReport]:
-        return list(self.iter_failed())
+        failures: List[SingleResultReport] = []
+        for report in self.reports:
+            if report.result.failed():
+                failures.append(report)
+        return failures
 
     @property
     def all_passed(self) -> List[SingleResultReport]:
@@ -99,31 +90,20 @@
                 skips.append(report)
         return skips
 
-    def _remove_matching(
-        self, filter_func: Callable[[TestResult], bool]
-    ) -> list[SingleResultReport]:
-        new_list = []
-        removed = []
-        for report in self.reports:
-            if filter_func(report.result):
-                removed.append(report)
-            else:
-                new_list.append(report)
-        self.reports = new_list
-        return removed
-
-    def remove_all_failing_flaky(
-        self, flake_filter: Callable[[TestResult], bool]
-    ) -> List[SingleResultReport]:
+    def remove_all_failing_flaky(self,
+                                 flake_filter: Callable[[TestResult], bool]
+                                 ) -> List[SingleResultReport]:
         """Splits out the flaky tests that failed so they can be rerun.
 
         Any failing tests that are known flaky are removed from the list of
         reports and returned to the caller to be rerun.
         """
-        return self._remove_matching(lambda r: r.failed() and flake_filter(r))
-
-    def remove_all_true_failures(self) -> list[Failure[UserDataT]]:
-        return [
-            cast(Failure[UserDataT], r.result)
-            for r in self._remove_matching(lambda r: isinstance(r, Failure))
-        ]
+        new_list = []
+        flaky = []
+        for report in self.reports:
+            if report.result.failed() and flake_filter(report.result):
+                flaky.append(report)
+            else:
+                new_list.append(report)
+        self.reports = new_list
+        return flaky
diff --git a/ndk/test/result.py b/ndk/test/result.py
index e9ef49e..869c656 100644
--- a/ndk/test/result.py
+++ b/ndk/test/result.py
@@ -14,24 +14,15 @@
 # limitations under the License.
 #
 """Test result classes."""
-from dataclasses import dataclass
-from typing import Any, Generic, TypeVar
+from typing import Any
 
 import ndk.termcolor
 
+
 # TODO: Need to resolve the circular import between this and ndk.test.types.
 Test = Any
 
 
-@dataclass(frozen=True)
-class ResultTranslations:
-    success: str = "PASS"
-    failure: str = "FAIL"
-    skip: str = "SKIP"
-    expected_failure: str = "KNOWN FAIL"
-    unexpected_success: str = "SHOULD FAIL"
-
-
 class TestResult:
     def __init__(self, test: Test):
         self.test = test
@@ -45,32 +36,14 @@
     def failed(self) -> bool:
         raise NotImplementedError
 
-    def to_string(
-        self, tr: ResultTranslations = ResultTranslations(), colored: bool = False
-    ) -> str:
+    def to_string(self, colored: bool = False) -> str:
         raise NotImplementedError
 
 
-UserDataT = TypeVar("UserDataT")
-
-
-class Failure(TestResult, Generic[UserDataT]):
-    def __init__(
-        self,
-        test: Test,
-        message: str,
-        repro_cmd: str | None = None,
-        user_data: UserDataT | None = None,
-    ) -> None:
+class Failure(TestResult):
+    def __init__(self, test: Test, message: str) -> None:
         super().__init__(test)
         self.message = message
-        self.repro_cmd = repro_cmd
-        self._user_data = user_data
-
-    @property
-    def user_data(self) -> UserDataT:
-        assert self._user_data is not None
-        return self._user_data
 
     def passed(self) -> bool:
         return False
@@ -78,12 +51,9 @@
     def failed(self) -> bool:
         return True
 
-    def to_string(
-        self, tr: ResultTranslations = ResultTranslations(), colored: bool = False
-    ) -> str:
-        label = ndk.termcolor.maybe_color(tr.failure, "red", colored)
-        repro = f" {self.repro_cmd}" if self.repro_cmd else ""
-        return f"{label} {self.test}:{repro}\n" f"{self.message}"
+    def to_string(self, colored: bool = False) -> str:
+        label = ndk.termcolor.maybe_color('FAIL', 'red', colored)
+        return f'{label} {self.test.name} [{self.test.config}]: {self.message}'
 
 
 class Success(TestResult):
@@ -93,11 +63,9 @@
     def failed(self) -> bool:
         return False
 
-    def to_string(
-        self, tr: ResultTranslations = ResultTranslations(), colored: bool = False
-    ) -> str:
-        label = ndk.termcolor.maybe_color(tr.success, "green", colored)
-        return f"{label} {self.test}"
+    def to_string(self, colored: bool = False) -> str:
+        label = ndk.termcolor.maybe_color('PASS', 'green', colored)
+        return f'{label} {self.test.name} [{self.test.config}]'
 
 
 class Skipped(TestResult):
@@ -111,17 +79,14 @@
     def failed(self) -> bool:
         return False
 
-    def to_string(
-        self, tr: ResultTranslations = ResultTranslations(), colored: bool = False
-    ) -> str:
-        label = ndk.termcolor.maybe_color(tr.skip, "yellow", colored)
-        return f"{label} {self.test}: {self.reason}"
+    def to_string(self, colored: bool = False) -> str:
+        label = ndk.termcolor.maybe_color('SKIP', 'yellow', colored)
+        return f'{label} {self.test.name} [{self.test.config}]: {self.reason}'
 
 
 class ExpectedFailure(TestResult):
-    def __init__(self, test: Test, message: str, broken_config: str, bug: str) -> None:
+    def __init__(self, test: Test, broken_config: str, bug: str) -> None:
         super().__init__(test)
-        self.message = message
         self.broken_config = broken_config
         self.bug = bug
 
@@ -131,14 +96,10 @@
     def failed(self) -> bool:
         return False
 
-    def to_string(
-        self, tr: ResultTranslations = ResultTranslations(), colored: bool = False
-    ) -> str:
-        label = ndk.termcolor.maybe_color(tr.expected_failure, "yellow", colored)
-        return (
-            f"{label} {self.test}: known failure "
-            f"for {self.broken_config} ({self.bug}): {self.message}"
-        )
+    def to_string(self, colored: bool = False) -> str:
+        label = ndk.termcolor.maybe_color('KNOWN FAIL', 'yellow', colored)
+        return (f'{label} {self.test.name} [{self.test.config}]: '
+                f'known failure for {self.broken_config} ({self.bug})')
 
 
 class UnexpectedSuccess(TestResult):
@@ -153,11 +114,7 @@
     def failed(self) -> bool:
         return True
 
-    def to_string(
-        self, tr: ResultTranslations = ResultTranslations(), colored: bool = False
-    ) -> str:
-        label = ndk.termcolor.maybe_color(tr.unexpected_success, "red", colored)
-        return (
-            f"{label} {self.test}: "
-            f"unexpected success for {self.broken_config} ({self.bug})"
-        )
+    def to_string(self, colored: bool = False) -> str:
+        label = ndk.termcolor.maybe_color('SHOULD FAIL', 'red', colored)
+        return (f'{label} {self.test.name} [{self.test.config}]: '
+                f'unexpected success for {self.broken_config} ({self.bug})')
diff --git a/ndk/test/scanner.py b/ndk/test/scanner.py
new file mode 100644
index 0000000..6830bc5
--- /dev/null
+++ b/ndk/test/scanner.py
@@ -0,0 +1,144 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import absolute_import
+
+import os
+from typing import List, Optional, Set
+
+from ndk.abis import Abi
+import ndk.paths
+from ndk.test.spec import BuildConfiguration
+from ndk.test.types import (
+    CMakeBuildTest,
+    LibcxxTest,
+    NdkBuildTest,
+    PythonBuildTest,
+    ShellBuildTest,
+    Test,
+)
+from ndk.toolchains import LinkerOption
+
+
+class TestScanner:
+    """Creates Test objects for a given test directory.
+
+    A test scanner is used to turn a test directory into a list of Tests for
+    any of the test types found in the directory.
+    """
+    def find_tests(self, path: str, name: str) -> List[Test]:
+        """Searches a directory for tests.
+
+        Args:
+            path: Path to the test directory.
+            name: Name of the test.
+
+        Returns: List of Tests, possibly empty.
+        """
+        raise NotImplementedError
+
+
+class BuildTestScanner(TestScanner):
+    def __init__(self, ndk_path: str, dist: bool = True) -> None:
+        self.ndk_path = ndk_path
+        self.dist = dist
+        self.build_configurations: Set[BuildConfiguration] = set()
+
+    def add_build_configuration(self, abi: Abi, api: Optional[int],
+                                linker: LinkerOption) -> None:
+        self.build_configurations.add(BuildConfiguration(abi, api, linker))
+
+    def find_tests(self, path: str, name: str) -> List[Test]:
+        # If we have a build.sh, that takes precedence over the Android.mk.
+        build_sh_path = os.path.join(path, 'build.sh')
+        if os.path.exists(build_sh_path):
+            return self.make_build_sh_tests(path, name)
+
+        # Same for test.py
+        build_sh_path = os.path.join(path, 'test.py')
+        if os.path.exists(build_sh_path):
+            return self.make_test_py_tests(path, name)
+
+        # But we can have both ndk-build and cmake tests in the same directory.
+        tests: List[Test] = []
+        android_mk_path = os.path.join(path, 'jni/Android.mk')
+        if os.path.exists(android_mk_path):
+            tests.extend(self.make_ndk_build_tests(path, name))
+
+        cmake_lists_path = os.path.join(path, 'CMakeLists.txt')
+        if os.path.exists(cmake_lists_path):
+            tests.extend(self.make_cmake_tests(path, name))
+        return tests
+
+    def make_build_sh_tests(self, path: str, name: str) -> List[Test]:
+        return [
+            ShellBuildTest(name, path, config, self.ndk_path)
+            for config in self.build_configurations
+        ]
+
+    def make_test_py_tests(self, path: str, name: str) -> List[Test]:
+        return [
+            PythonBuildTest(name, path, config, self.ndk_path)
+            for config in self.build_configurations
+        ]
+
+    def make_ndk_build_tests(self, path: str, name: str) -> List[Test]:
+        return [
+            NdkBuildTest(name, path, config, self.ndk_path, self.dist)
+            for config in self.build_configurations
+        ]
+
+    def make_cmake_tests(self, path: str, name: str) -> List[Test]:
+        return [
+            CMakeBuildTest(name, path, config, self.ndk_path, self.dist)
+            for config in self.build_configurations
+        ]
+
+
+class LibcxxTestScanner(TestScanner):
+    ALL_TESTS: List[str] = []
+    LIBCXX_SRC = ndk.paths.ANDROID_DIR / 'external/libcxx'
+
+    def __init__(self, ndk_path: str) -> None:
+        self.ndk_path = ndk_path
+        self.build_configurations: Set[BuildConfiguration] = set()
+        LibcxxTestScanner.find_all_libcxx_tests()
+
+    def add_build_configuration(self, abi: Abi, api: Optional[int],
+                                linker: LinkerOption) -> None:
+        self.build_configurations.add(BuildConfiguration(abi, api, linker))
+
+    def find_tests(self, path: str, name: str) -> List[Test]:
+        return [
+            LibcxxTest('libc++', path, config, self.ndk_path)
+            for config in self.build_configurations
+        ]
+
+    @classmethod
+    def find_all_libcxx_tests(cls) -> None:
+        # If we instantiate multiple LibcxxTestScanners, we still only need to
+        # initialize this once. We only create these in the main thread, so
+        # there's no risk of race.
+        if cls.ALL_TESTS:
+            return
+
+        test_base_dir = os.path.join(cls.LIBCXX_SRC, 'test')
+
+        for root, _dirs, files in os.walk(test_base_dir):
+            for test_file in files:
+                if test_file.endswith('.cpp'):
+                    test_path = ndk.paths.to_posix_path(os.path.relpath(
+                        os.path.join(root, test_file), test_base_dir))
+                    cls.ALL_TESTS.append(test_path)
diff --git a/ndk/test/spec.py b/ndk/test/spec.py
index 5c13c86..9bbc091 100644
--- a/ndk/test/spec.py
+++ b/ndk/test/spec.py
@@ -14,43 +14,23 @@
 # limitations under the License.
 #
 """Configuration objects for describing test runs."""
-from __future__ import annotations
 
-import enum
-import json
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Any, Dict, Iterable, List, Optional
+from typing import Iterable, List, Optional
 
-import ndk.test.suites
-from ndk.abis import LP32_ABIS, LP64_ABIS, Abi
-
-
-@enum.unique
-class CMakeToolchainFile(enum.Enum):
-    Legacy = "legacy"
-    Default = "new"
-
-
-@enum.unique
-class WeakSymbolsConfig(enum.Enum):
-    WeakAPI = "weakapi"
-    StrictAPI = "strictapi"
+from ndk.abis import Abi
+from ndk.toolchains import LinkerOption
 
 
 class TestOptions:
     """Configuration for how tests should be run."""
 
-    def __init__(
-        self,
-        src_dir: Path,
-        ndk_path: Path,
-        out_dir: Path,
-        test_filter: Optional[str] = None,
-        clean: bool = True,
-        build_report: Optional[str] = None,
-        package_path: Optional[Path] = None,
-    ) -> None:
+    def __init__(self,
+                 src_dir: str,
+                 ndk_path: str,
+                 out_dir: str,
+                 test_filter: str = None,
+                 clean: bool = True,
+                 build_report: str = None) -> None:
         """Initializes a TestOptions object.
 
         Args:
@@ -60,7 +40,6 @@
             test_filter: Test filter string.
             clean: True if the out directory should be cleaned before building.
             build_report: Path to write a build report to, if any.
-            package_path: Path (without extension) to package the tests.
         """
         self.src_dir = src_dir
         self.ndk_path = ndk_path
@@ -68,36 +47,18 @@
         self.test_filter = test_filter
         self.clean = clean
         self.build_report = build_report
-        self.package_path = package_path
 
 
 class TestSpec:
-    """Configuration for which tests should be run on which devices."""
+    """Configuration for which tests should be run."""
 
-    def __init__(
-        self, abis: Iterable[Abi], suites: Iterable[str], devices: Dict[int, List[Abi]]
-    ) -> None:
+    def __init__(self, abis: Iterable[Abi], linkers: Iterable[LinkerOption],
+                 suites: Iterable[str]) -> None:
         self.abis = abis
+        self.linkers = linkers
         self.suites = suites
-        self.devices = devices
-
-    @classmethod
-    def load(cls, path: Path, abis: Optional[Iterable[Abi]] = None) -> TestSpec:
-        with path.open(encoding="utf-8") as config_file:
-            test_config: dict[str, Any] = json.load(config_file)
-        if abis is None:
-            abis = test_config.get("abis", ndk.abis.ALL_ABIS)
-        assert abis is not None
-        suites = test_config.get("suites", ndk.test.suites.ALL_SUITES)
-        devices: Dict[int, List[Abi]] = {}
-        for api, device_abis in test_config["devices"].items():
-            devices[int(api)] = []
-            for abi in device_abis:
-                devices[int(api)].append(Abi(abi))
-        return cls(abis, suites, devices)
 
 
-@dataclass(frozen=True)
 class BuildConfiguration:
     """A configuration for a single test build.
 
@@ -105,54 +66,33 @@
     run.
     """
 
-    abi: Abi
-    # This is always None for the global config while building. Each test will fill in
-    # the appropriate value for the test (based on `APP_PLATFORM` or similar). It is
-    # still a part of the BuildConfiguration class because we do not have separate
-    # classes for build config *input* (the BuildConfiguration created by
-    # TestBuilder.find_tests) and build config *output* (the result decided and
-    # serialized by the test, which needs to be read when the test is later run by
-    # run_tests.py).
-    api: Optional[int]
-    toolchain_file: CMakeToolchainFile
-    weak_symbol: WeakSymbolsConfig
+    def __init__(self, abi: Abi, api: Optional[int],
+                 linker: LinkerOption) -> None:
+        self.abi = abi
+        self.api = api
+        self.linker = linker
 
-    def with_api(self, api: int) -> BuildConfiguration:
-        """Creates a copy of this BuildConfiguration with a new API level.
+    def __eq__(self, other: object) -> bool:
+        assert isinstance(other, BuildConfiguration)
+        if self.abi != other.abi:
+            return False
+        if self.api != other.api:
+            return False
+        if self.linker != other.linker:
+            return False
+        return True
 
-        Args:
-            api: The API level used by the new BuildConfiguration.
-
-        Returns:
-            A copy of this BuildConfiguration with the new API level.
-        """
-        return BuildConfiguration(
-            abi=self.abi,
-            api=api,
-            toolchain_file=self.toolchain_file,
-            weak_symbol=self.weak_symbol,
-        )
+    def __repr__(self) -> str:
+        return f'BuildConfiguration({self.abi}, {self.api}, {self.linker})'
 
     def __str__(self) -> str:
-        return "-".join(
-            [
-                self.abi,
-                str(self.api),
-                self.toolchain_file.value,
-                self.weak_symbol.value,
-            ]
-        )
+        return f'{self.abi}-{self.api}-{self.linker.value}'
 
-    @property
-    def is_lp32(self) -> bool:
-        return self.abi in LP32_ABIS
-
-    @property
-    def is_lp64(self) -> bool:
-        return self.abi in LP64_ABIS
+    def __hash__(self) -> int:
+        return hash(str(self))
 
     @staticmethod
-    def from_string(config_string: str) -> BuildConfiguration:
+    def from_string(config_string: str) -> 'BuildConfiguration':
         """Converts a string into a BuildConfiguration.
 
         Args:
@@ -164,32 +104,26 @@
         Raises:
             ValueError: The given string could not be matched to a TestSpec.
         """
-        abi, _, rest = config_string.partition("-")
-        if abi == "armeabi" and rest.startswith("v7a-"):
-            abi += "-v7a"
-            _, _, rest = rest.partition("-")
-        elif abi == "arm64" and rest.startswith("v8a-"):
-            abi += "-v8a"
-            _, _, rest = rest.partition("-")
+        abi, _, rest = config_string.partition('-')
+        if abi == 'armeabi' and rest.startswith('v7a-'):
+            abi += '-v7a'
+            _, _, rest = rest.partition('-')
+        elif abi == 'arm64' and rest.startswith('v8a-'):
+            abi += '-v8a'
+            _, _, rest = rest.partition('-')
 
-        api_str, toolchain_file_str, weak_symbols_str = rest.split("-")
+        api_str, linker_str = rest.split('-')
         api = int(api_str)
-        toolchain_file = CMakeToolchainFile(toolchain_file_str)
-        weak_symbols = WeakSymbolsConfig(weak_symbols_str)
+        linker = LinkerOption(linker_str)
 
-        return BuildConfiguration(Abi(abi), api, toolchain_file, weak_symbols)
+        return BuildConfiguration(Abi(abi), api, linker)
 
-    def get_extra_ndk_build_flags(self) -> list[str]:
+    def get_extra_ndk_build_flags(self) -> List[str]:
         extra_flags = []
-        extra_flags.append("V=1")
-        if self.weak_symbol == WeakSymbolsConfig.WeakAPI:
-            extra_flags.append("APP_WEAK_API_DEFS=true")
-
+        extra_flags.append('V=1')
         return extra_flags
 
-    def get_extra_cmake_flags(self) -> list[str]:
+    def get_extra_cmake_flags(self) -> List[str]:
         extra_flags = []
-        extra_flags.append("-DCMAKE_VERBOSE_MAKEFILE=ON")
-        if self.weak_symbol == WeakSymbolsConfig.WeakAPI:
-            extra_flags.append("-DANDROID_WEAK_API_DEFS=ON")
+        extra_flags.append('-DCMAKE_VERBOSE_MAKEFILE=ON')
         return extra_flags
diff --git a/ndk/test/suites.py b/ndk/test/suites.py
index bb0eb09..c3e176a 100644
--- a/ndk/test/suites.py
+++ b/ndk/test/suites.py
@@ -16,6 +16,7 @@
 
 
 ALL_SUITES = (
-    "build",
-    "device",
+    'build',
+    'device',
+    'libc++',
 )
diff --git a/ndk/test/test_devices.py b/ndk/test/test_devices.py
index 18cb0bc..f953d83 100644
--- a/ndk/test/test_devices.py
+++ b/ndk/test/test_devices.py
@@ -16,20 +16,20 @@
 """Tests for ndk.test.devices."""
 from __future__ import absolute_import
 
+from typing import List, Optional
 import unittest
-from typing import List
 
-import ndk.test.devices
 from ndk.abis import Abi
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile, WeakSymbolsConfig
+import ndk.test.devices
+import ndk.test.spec
+from ndk.toolchains import LinkerOption
 
 
 class MockDevice(ndk.test.devices.Device):
-    def __init__(self, version: int, abis: List[Abi], supports_mte: bool) -> None:
-        super().__init__("")
+    def __init__(self, version: int, abis: List[Abi]) -> None:
+        super().__init__('')
         self._version = version
         self._abis = abis
-        self._supports_mte = supports_mte
 
     @property
     def abis(self) -> List[Abi]:
@@ -39,54 +39,48 @@
     def version(self) -> int:
         return self._version
 
-    @property
-    def supports_mte(self) -> bool:
-        return self._supports_mte
 
-
-def make_test_build_configuration(abi: Abi, api: int) -> BuildConfiguration:
-    # The CMake toolchain file option is irrelevant for determining device
-    # compatibility.
-    return BuildConfiguration(
-        abi, api, CMakeToolchainFile.Default, WeakSymbolsConfig.WeakAPI
-    )
+class TestBuildConfiguration(ndk.test.spec.BuildConfiguration):
+    def __init__(self, abi: Abi, api: Optional[int]):
+        # Linker option is irrelevant for determining device compatibility.
+        super().__init__(abi, api, LinkerOption.Default)
 
 
 class DeviceTest(unittest.TestCase):
     def test_can_run_build_config(self) -> None:
-        jb_arm = MockDevice(16, [Abi("armeabi-v7a")], False)
-        n_arm = MockDevice(25, [Abi("armeabi-v7a"), Abi("arm64-v8a")], False)
-        n_intel = MockDevice(25, [Abi("x86"), Abi("x86_64")], False)
+        jb_arm = MockDevice(16, [Abi('armeabi-v7a')])
+        n_arm = MockDevice(25, [Abi('armeabi-v7a'), Abi('arm64-v8a')])
+        n_intel = MockDevice(25, [Abi('x86'), Abi('x86_64')])
 
-        jb_arm7 = make_test_build_configuration(Abi("armeabi-v7a"), 16)
+        jb_arm7 = TestBuildConfiguration(Abi('armeabi-v7a'), 16)
         # Too old, no PIE support.
         self.assertTrue(jb_arm.can_run_build_config(jb_arm7))
         self.assertTrue(n_arm.can_run_build_config(jb_arm7))
         # Wrong ABI.
         self.assertFalse(n_intel.can_run_build_config(jb_arm7))
 
-        l_arm7 = make_test_build_configuration(Abi("armeabi-v7a"), 21)
+        l_arm7 = TestBuildConfiguration(Abi('armeabi-v7a'), 21)
         # Too old.
         self.assertFalse(jb_arm.can_run_build_config(l_arm7))
         self.assertTrue(n_arm.can_run_build_config(l_arm7))
         # Wrong ABI.
         self.assertFalse(n_intel.can_run_build_config(l_arm7))
 
-        l_arm64 = make_test_build_configuration(Abi("arm64-v8a"), 21)
+        l_arm64 = TestBuildConfiguration(Abi('arm64-v8a'), 21)
         # Too old, wrong ABI.
         self.assertFalse(jb_arm.can_run_build_config(l_arm64))
         self.assertTrue(n_arm.can_run_build_config(l_arm64))
         # Wrong ABI.
         self.assertFalse(n_intel.can_run_build_config(l_arm64))
 
-        l_intel = make_test_build_configuration(Abi("x86_64"), 21)
+        l_intel = TestBuildConfiguration(Abi('x86_64'), 21)
         # Too old, wrong ABI.
         self.assertFalse(jb_arm.can_run_build_config(l_intel))
         # Wrong ABI.
         self.assertFalse(n_arm.can_run_build_config(l_intel))
         self.assertTrue(n_intel.can_run_build_config(l_intel))
 
-        o_arm7 = make_test_build_configuration(Abi("armeabi-v7a"), 26)
+        o_arm7 = TestBuildConfiguration(Abi('armeabi-v7a'), 26)
         # Too old.
         self.assertFalse(jb_arm.can_run_build_config(o_arm7))
         # Too old.
@@ -94,7 +88,7 @@
         # Too old, wrong ABI.
         self.assertFalse(n_intel.can_run_build_config(o_arm7))
 
-        o_arm64 = make_test_build_configuration(Abi("arm64-v8a"), 26)
+        o_arm64 = TestBuildConfiguration(Abi('arm64-v8a'), 26)
         # Too old.
         self.assertFalse(jb_arm.can_run_build_config(o_arm64))
         # Too old.
@@ -102,7 +96,7 @@
         # Too old, wrong ABI.
         self.assertFalse(n_intel.can_run_build_config(o_arm64))
 
-        o_intel = make_test_build_configuration(Abi("x86_64"), 26)
+        o_intel = TestBuildConfiguration(Abi('x86_64'), 26)
         # Too old, wrong ABI.
         self.assertFalse(jb_arm.can_run_build_config(o_intel))
         # Too old, wrong ABI.
diff --git a/ndk/test/test_filters.py b/ndk/test/test_filters.py
index d663c87..17abbf7 100644
--- a/ndk/test/test_filters.py
+++ b/ndk/test/test_filters.py
@@ -20,34 +20,34 @@
 
 class FilterTest(unittest.TestCase):
     def test_filters(self) -> None:
-        filters = TestFilter.from_string("foo,ba*")
-        self.assertTrue(filters.filter("foo"))
-        self.assertTrue(filters.filter("bar"))
-        self.assertTrue(filters.filter("baz"))
-        self.assertFalse(filters.filter("qux"))
+        filters = TestFilter.from_string('foo,ba*')
+        self.assertTrue(filters.filter('foo'))
+        self.assertTrue(filters.filter('bar'))
+        self.assertTrue(filters.filter('baz'))
+        self.assertFalse(filters.filter('qux'))
 
-        filters.add_filter("woodly.*")
-        filters.add_filter("doodly.b*")
-        filters.add_filter("qu*.b*")
+        filters.add_filter('woodly.*')
+        filters.add_filter('doodly.b*')
+        filters.add_filter('qu*.b*')
 
-        self.assertTrue(filters.filter("foo"))
-        self.assertTrue(filters.filter("foo.bar"))
+        self.assertTrue(filters.filter('foo'))
+        self.assertTrue(filters.filter('foo.bar'))
 
-        self.assertTrue(filters.filter("woodly.bar"))
-        self.assertFalse(filters.filter("woo.bar"))
+        self.assertTrue(filters.filter('woodly.bar'))
+        self.assertFalse(filters.filter('woo.bar'))
 
-        self.assertTrue(filters.filter("doodly"))
-        self.assertTrue(filters.filter("doodly.baz"))
-        self.assertFalse(filters.filter("doodly.qux"))
+        self.assertTrue(filters.filter('doodly'))
+        self.assertTrue(filters.filter('doodly.baz'))
+        self.assertFalse(filters.filter('doodly.qux'))
 
-        self.assertTrue(filters.filter("qux"))
-        self.assertTrue(filters.filter("quux"))
-        self.assertTrue(filters.filter("qux.bar"))
-        self.assertTrue(filters.filter("quux.bar"))
-        self.assertFalse(filters.filter("qux.foo"))
-        self.assertFalse(filters.filter("qx.bar"))
+        self.assertTrue(filters.filter('qux'))
+        self.assertTrue(filters.filter('quux'))
+        self.assertTrue(filters.filter('qux.bar'))
+        self.assertTrue(filters.filter('quux.bar'))
+        self.assertFalse(filters.filter('qux.foo'))
+        self.assertFalse(filters.filter('qx.bar'))
 
     def test_empty_filters(self) -> None:
-        filters = TestFilter.from_string("")
-        self.assertTrue(filters.filter("foo"))
-        self.assertTrue(filters.filter("foo.bar"))
+        filters = TestFilter.from_string('')
+        self.assertTrue(filters.filter('foo'))
+        self.assertTrue(filters.filter('foo.bar'))
diff --git a/ndk/test/test_paths.py b/ndk/test/test_paths.py
index 7459963..6e20079 100644
--- a/ndk/test/test_paths.py
+++ b/ndk/test/test_paths.py
@@ -16,8 +16,9 @@
 """Tests for ndk.paths."""
 from __future__ import absolute_import
 
+import os
 import unittest
-from pathlib import Path
+
 from unittest import mock
 
 import ndk.config
@@ -27,29 +28,29 @@
 
 class GetInstallPathTest(unittest.TestCase):
     def setUp(self) -> None:
-        self.release = "bar"
+        self.release = 'bar'
         self.saved_release = ndk.config.release
         ndk.config.release = self.release
 
     def tearDown(self) -> None:
         ndk.config.release = self.saved_release
 
-    @mock.patch("ndk.paths.get_out_dir")
+    @mock.patch('ndk.paths.get_out_dir')
     def test_inferred_out_dir(self, mock_get_out_dir: mock.Mock) -> None:
         """Tests that the correct path is returned for an inferred out_dir"""
-        out_dir = Path("foo")
+        out_dir = 'foo'
         mock_get_out_dir.return_value = out_dir
-        release = "android-ndk-" + self.release
+        release = 'android-ndk-' + self.release
         self.assertEqual(
             ndk.paths.get_install_path(),
-            out_dir / ndk.hosts.get_default_host().value / release,
-        )
+            os.path.join(out_dir,
+                         ndk.hosts.get_default_host().value, release))
 
     def test_supplied_out_dir(self) -> None:
         """Tests that the correct path is returned for a supplied out_dir"""
-        out_dir = Path("foo")
-        release = "android-ndk-" + self.release
+        out_dir = 'foo'
+        release = 'android-ndk-' + self.release
         self.assertEqual(
-            ndk.paths.get_install_path(Path("foo")),
-            out_dir / ndk.hosts.get_default_host().value / release,
-        )
+            ndk.paths.get_install_path('foo'),
+            os.path.join(out_dir,
+                         ndk.hosts.get_default_host().value, release))
diff --git a/ndk/test/test_report.py b/ndk/test/test_report.py
index 5e80cae..920155b 100644
--- a/ndk/test/test_report.py
+++ b/ndk/test/test_report.py
@@ -21,40 +21,41 @@
 
 
 class MockTest:
-    def __init__(self, name: str = "") -> None:
+    def __init__(self, name: str = '') -> None:
         self.name = name
 
 
 class ReportTest(unittest.TestCase):
     def test_remove_all_failing_flaky(self) -> None:
-        report = ndk.test.report.Report[None]()
+        report = ndk.test.report.Report()
         # Success. Not filtered.
-        report.add_result("build", ndk.test.result.Success(MockTest()))
+        report.add_result('build', ndk.test.result.Success(MockTest()))
 
         # Normal failure. Not filtered.
-        report.add_result("build", ndk.test.result.Failure(MockTest(), "failed"))
+        report.add_result('build', ndk.test.result.Failure(
+            MockTest(), 'failed'))
 
         # Skipped test. Not filtered.
-        report.add_result("build", ndk.test.result.Skipped(MockTest(), "skipped"))
+        report.add_result('build', ndk.test.result.Skipped(
+            MockTest(), 'skipped'))
 
         # Expected failure. Not filtered.
-        report.add_result(
-            "build",
-            ndk.test.result.ExpectedFailure(MockTest(), "failed", "bug", "config"),
-        )
+        report.add_result('build', ndk.test.result.ExpectedFailure(
+            MockTest(), 'bug', 'config'))
 
         # Unexpected success. Not filtered.
-        report.add_result(
-            "build", ndk.test.result.UnexpectedSuccess(MockTest(), "bug", "config")
-        )
+        report.add_result('build', ndk.test.result.UnexpectedSuccess(
+            MockTest(), 'bug', 'config'))
 
         # adb didn't tell us anything. Filtered.
-        report.add_result(
-            "build",
-            ndk.test.result.Failure(
-                MockTest(), "Could not find exit status in shell output."
-            ),
-        )
+        report.add_result('build', ndk.test.result.Failure(
+            MockTest(), 'Could not find exit status in shell output.'))
+
+        # Flaky libc++ tests. Filtered.
+        report.add_result('build', ndk.test.result.Failure(
+            MockTest('libc++.libcxx/thread/foo'), ''))
+        report.add_result('build', ndk.test.result.Failure(
+            MockTest('libc++.std/thread/foo'), ''))
 
         results = report.remove_all_failing_flaky(ndk.run_tests.flake_filter)
-        self.assertEqual(1, len(results))
+        self.assertEqual(3, len(results))
diff --git a/ndk/test/test_spec.py b/ndk/test/test_spec.py
index bac19ed..348390f 100644
--- a/ndk/test/test_spec.py
+++ b/ndk/test/test_spec.py
@@ -15,31 +15,23 @@
 #
 import unittest
 
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile, WeakSymbolsConfig
+import ndk.test.spec
 
 
 class BuildConfigurationTest(unittest.TestCase):
     def test_from_string(self) -> None:
-        config = BuildConfiguration.from_string("armeabi-v7a-16-legacy-strictapi")
-        self.assertEqual("armeabi-v7a", config.abi)
+        config = ndk.test.spec.BuildConfiguration.from_string('armeabi-v7a-16')
+        self.assertEqual('armeabi-v7a', config.abi)
         self.assertEqual(16, config.api)
-        self.assertEqual(CMakeToolchainFile.Legacy, config.toolchain_file)
-        self.assertEqual(WeakSymbolsConfig.StrictAPI, config.weak_symbol)
 
-        config = BuildConfiguration.from_string("arm64-v8a-21-new-strictapi")
-        self.assertEqual("arm64-v8a", config.abi)
+        config = ndk.test.spec.BuildConfiguration.from_string('arm64-v8a-21')
+        self.assertEqual('arm64-v8a', config.abi)
         self.assertEqual(21, config.api)
-        self.assertEqual(CMakeToolchainFile.Default, config.toolchain_file)
-        self.assertEqual(WeakSymbolsConfig.StrictAPI, config.weak_symbol)
 
-        config = BuildConfiguration.from_string("x86-16-new-strictapi")
-        self.assertEqual("x86", config.abi)
+        config = ndk.test.spec.BuildConfiguration.from_string('x86-16')
+        self.assertEqual('x86', config.abi)
         self.assertEqual(16, config.api)
-        self.assertEqual(CMakeToolchainFile.Default, config.toolchain_file)
-        self.assertEqual(WeakSymbolsConfig.StrictAPI, config.weak_symbol)
 
-        config = BuildConfiguration.from_string("x86_64-21-new-weakapi")
-        self.assertEqual("x86_64", config.abi)
+        config = ndk.test.spec.BuildConfiguration.from_string('x86_64-21')
+        self.assertEqual('x86_64', config.abi)
         self.assertEqual(21, config.api)
-        self.assertEqual(CMakeToolchainFile.Default, config.toolchain_file)
-        self.assertEqual(WeakSymbolsConfig.WeakAPI, config.weak_symbol)
diff --git a/ndk/test/test_workqueue.py b/ndk/test/test_workqueue.py
index cc65ebf..5af032f 100644
--- a/ndk/test/test_workqueue.py
+++ b/ndk/test/test_workqueue.py
@@ -16,16 +16,15 @@
 """Tests for ndk.workqueue."""
 import multiprocessing
 import os
+from queue import Queue
 import signal
 import sys
-import time
-import unittest
-from queue import Queue
 from threading import Event
 from types import FrameType
-from typing import Optional
+import time
+import unittest
 
-from ndk.workqueue import BasicWorkQueue, TaskError, Worker, WorkQueue
+from ndk.workqueue import DummyWorkQueue, TaskError, Worker, WorkQueue
 
 
 def put(_worker: Worker, i: int) -> int:
@@ -35,7 +34,6 @@
 
 class Functor:
     """Functor that returns the argument passed to the constructor."""
-
     def __init__(self, value: int) -> None:
         self.value = value
 
@@ -48,21 +46,20 @@
     event.wait()
 
 
-def update_status(
-    worker: Worker, ready_event: Event, finish_event: Event, new_status: str
-) -> None:
+def update_status(worker: Worker, ready_event: Event, finish_event: Event,
+                  new_status: str) -> None:
     """Updates the worker's status and waits for an event before finishing."""
     worker.status = new_status
     ready_event.set()
     finish_event.wait()
 
 
-def sigterm_handler(_signum: int, _trace: Optional[FrameType]) -> None:
+def sigterm_handler(_signum: int, _trace: FrameType) -> None:
     """Raises SystemExit."""
     sys.exit()
 
 
-def sleep_until_sigterm(pid_queue: Queue[int]) -> None:
+def sleep_until_sigterm(pid_queue: Queue) -> None:
     """Sleeps until signalled, then passes the PID through the queue."""
     signal.signal(signal.SIGTERM, sigterm_handler)
     try:
@@ -72,7 +69,7 @@
         pid_queue.put(os.getpid())
 
 
-def spawn_child(_worker: Worker, pid_queue: Queue[int]) -> None:
+def spawn_child(_worker: Worker, pid_queue: Queue) -> None:
     """Spawns a child process to check behavior of terminate().
 
     The PIDs of both processes are returned via the pid_queue, and then both
@@ -85,14 +82,13 @@
     sleep_until_sigterm(pid_queue)
 
 
-def raise_error(_worker: Worker) -> None:
+def raise_error() -> None:
     """Raises a RuntimeError to be re-raised in the caller."""
-    raise RuntimeError("Error in child")
+    raise RuntimeError('Error in child')
 
 
 class WorkQueueTest(unittest.TestCase):
     """Tests for WorkQueue."""
-
     def test_put_func(self) -> None:
         """Test that we can pass a function to the queue and get results."""
         workqueue = WorkQueue(4)
@@ -150,9 +146,9 @@
         ready_event = manager.Event()
         finish_event = manager.Event()
         self.assertEqual(Worker.IDLE_STATUS, workqueue.workers[0].status)
-        workqueue.add_task(update_status, ready_event, finish_event, "working")
+        workqueue.add_task(update_status, ready_event, finish_event, 'working')
         ready_event.wait()
-        self.assertEqual("working", workqueue.workers[0].status)
+        self.assertEqual('working', workqueue.workers[0].status)
         finish_event.set()
         workqueue.get_result()
         self.assertEqual(Worker.IDLE_STATUS, workqueue.workers[0].status)
@@ -195,12 +191,11 @@
             workqueue.join()
 
 
-class BasicWorkQueueTest(unittest.TestCase):
-    """Tests for BasicWorkQueue."""
-
+class DummyWorkQueueTest(unittest.TestCase):
+    """Tests for DummyWorkQueue."""
     def test_put_func(self) -> None:
         """Test that we can pass a function to the queue and get results."""
-        workqueue: BasicWorkQueue[int] = BasicWorkQueue()
+        workqueue = DummyWorkQueue()
 
         workqueue.add_task(put, 1)
         workqueue.add_task(put, 2)
@@ -216,7 +211,7 @@
 
     def test_put_functor(self) -> None:
         """Test that we can pass a functor to the queue and get results."""
-        workqueue: BasicWorkQueue[int] = BasicWorkQueue()
+        workqueue = DummyWorkQueue()
 
         workqueue.add_task(Functor(1))
         workqueue.add_task(Functor(2))
@@ -246,7 +241,7 @@
 
     def test_subprocess_exception(self) -> None:
         """Tests that exceptions raised in the task are re-raised."""
-        workqueue: BasicWorkQueue[None] = BasicWorkQueue()
+        workqueue = DummyWorkQueue()
 
         try:
             workqueue.add_task(raise_error)
diff --git a/ndk/test/types.py b/ndk/test/types.py
new file mode 100644
index 0000000..92c4a4f
--- /dev/null
+++ b/ndk/test/types.py
@@ -0,0 +1,777 @@
+#
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import fnmatch
+import imp
+import logging
+import multiprocessing
+import os
+from pathlib import Path
+import re
+import shutil
+import subprocess
+from typing import (
+    List,
+    Optional,
+    TextIO,
+    Tuple,
+    Union,
+)
+import xml.etree.ElementTree
+
+from ndk.abis import Abi
+import ndk.ansi
+import ndk.ext.os
+import ndk.ext.shutil
+import ndk.ext.subprocess
+import ndk.hosts
+import ndk.ndkbuild
+import ndk.paths
+from ndk.test.config import LibcxxTestConfig, TestConfig
+from ndk.test.filters import TestFilter
+from ndk.test.spec import BuildConfiguration
+from ndk.test.result import Failure, Skipped, Success, TestResult
+from ndk.toolchains import LinkerOption
+
+
+def logger() -> logging.Logger:
+    """Return the logger for this module."""
+    return logging.getLogger(__name__)
+
+
+def _get_jobs_args() -> List[str]:
+    cpus = multiprocessing.cpu_count()
+    return [f'-j{cpus}', f'-l{cpus}']
+
+
+def _prep_build_dir(src_dir: str, out_dir: str) -> None:
+    if os.path.exists(out_dir):
+        shutil.rmtree(out_dir)
+    shutil.copytree(src_dir, out_dir)
+
+
+class Test:
+    def __init__(self, name: str, test_dir: str, config: BuildConfiguration,
+                 ndk_path: str) -> None:
+        self.name = name
+        self.test_dir = test_dir
+        self.config = config
+        self.ndk_path = ndk_path
+
+    def get_test_config(self) -> TestConfig:
+        return TestConfig.from_test_dir(self.test_dir)
+
+    def run(self, obj_dir: str, dist_dir: str,
+            test_filters: TestFilter) -> Tuple[TestResult, List['Test']]:
+        raise NotImplementedError
+
+    def is_negative_test(self) -> bool:
+        raise NotImplementedError
+
+    def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
+        return self.get_test_config().build_broken(self)
+
+    def check_unsupported(self) -> Optional[str]:
+        return self.get_test_config().build_unsupported(self)
+
+    def get_build_dir(self, out_dir: str) -> str:
+        raise NotImplementedError
+
+    def __str__(self) -> str:
+        return f'{self.name} [{self.config}]'
+
+
+class BuildTest(Test):
+    def __init__(self, name: str, test_dir: str, config: BuildConfiguration,
+                 ndk_path: str) -> None:
+        super().__init__(name, test_dir, config, ndk_path)
+
+        if self.api is None:
+            raise ValueError
+
+    @property
+    def abi(self) -> Abi:
+        return self.config.abi
+
+    @property
+    def api(self) -> Optional[int]:
+        return self.config.api
+
+    @property
+    def platform(self) -> Optional[int]:
+        return self.api
+
+    @property
+    def ndk_build_flags(self) -> List[str]:
+        flags = self.config.get_extra_ndk_build_flags()
+        if flags is None:
+            flags = []
+        return flags + self.get_extra_ndk_build_flags()
+
+    @property
+    def cmake_flags(self) -> List[str]:
+        flags = self.config.get_extra_cmake_flags()
+        if flags is None:
+            flags = []
+        return flags + self.get_extra_cmake_flags()
+
+    def run(self, obj_dir: str, dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        raise NotImplementedError
+
+    def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
+        return self.get_test_config().build_broken(self)
+
+    def check_unsupported(self) -> Optional[str]:
+        return self.get_test_config().build_unsupported(self)
+
+    def is_negative_test(self) -> bool:
+        return self.get_test_config().is_negative_test()
+
+    def get_extra_cmake_flags(self) -> List[str]:
+        return self.get_test_config().extra_cmake_flags()
+
+    def get_extra_ndk_build_flags(self) -> List[str]:
+        return self.get_test_config().extra_ndk_build_flags()
+
+
+class PythonBuildTest(BuildTest):
+    """A test that is implemented by test.py.
+
+    A test.py test has a test.py file in its root directory. This module
+    contains a run_test function which returns a tuple of `(boolean_success,
+    string_failure_message)` and takes the following kwargs (all of which
+    default to None):
+
+    abi: ABI to test as a string.
+    platform: Platform to build against as a string.
+    ndk_build_flags: Additional build flags that should be passed to ndk-build
+                     if invoked as a list of strings.
+    """
+
+    def __init__(self, name: str, test_dir: str, config: BuildConfiguration,
+                 ndk_path: str) -> None:
+        api = config.api
+        if api is None:
+            api = ndk.abis.min_api_for_abi(config.abi)
+        config = ndk.test.spec.BuildConfiguration(config.abi, api,
+                                                  config.linker)
+        super().__init__(name, test_dir, config, ndk_path)
+
+        if self.abi not in ndk.abis.ALL_ABIS:
+            raise ValueError('{} is not a valid ABI'.format(self.abi))
+
+        try:
+            assert self.api is not None
+            int(self.api)
+        except ValueError:
+            raise ValueError(f'{self.api} is not a valid API number')
+
+        # Not a ValueError for this one because it should be impossible. This
+        # is actually a computed result from the config we're passed.
+        assert self.ndk_build_flags is not None
+
+    def get_build_dir(self, out_dir: str) -> str:
+        return os.path.join(out_dir, str(self.config), 'test.py', self.name)
+
+    def run(self, obj_dir: str, _dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        build_dir = self.get_build_dir(obj_dir)
+        logger().info('Building test: %s', self.name)
+        _prep_build_dir(self.test_dir, build_dir)
+        with ndk.ext.os.cd(build_dir):
+            module = imp.load_source('test', 'test.py')
+            assert self.platform is not None
+            success, failure_message = module.run_test(  # type: ignore
+                self.ndk_path, self.abi, self.platform, self.config.linker, self.ndk_build_flags)
+            if success:
+                return Success(self), []
+            else:
+                return Failure(self, failure_message), []
+
+
+class ShellBuildTest(BuildTest):
+    def __init__(self, name: str, test_dir: str, config: BuildConfiguration,
+                 ndk_path: str) -> None:
+        api = config.api
+        if api is None:
+            api = ndk.abis.min_api_for_abi(config.abi)
+        config = ndk.test.spec.BuildConfiguration(config.abi, api,
+                                                  config.linker)
+        super().__init__(name, test_dir, config, ndk_path)
+
+    def get_build_dir(self, out_dir: str) -> str:
+        return os.path.join(out_dir, str(self.config), 'build.sh', self.name)
+
+    def run(self, obj_dir: str, _dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        build_dir = self.get_build_dir(obj_dir)
+        logger().info('Building test: %s', self.name)
+        if os.name == 'nt':
+            reason = 'build.sh tests are not supported on Windows'
+            return Skipped(self, reason), []
+        else:
+            assert self.api is not None
+            result = _run_build_sh_test(self, build_dir, self.test_dir,
+                                        self.ndk_path, self.ndk_build_flags,
+                                        self.abi, self.api, self.config.linker)
+            return result, []
+
+
+def _run_build_sh_test(test: ShellBuildTest, build_dir: str, test_dir: str,
+                       ndk_path: str, ndk_build_flags: List[str], abi: Abi,
+                       platform: int, linker: LinkerOption) -> TestResult:
+    _prep_build_dir(test_dir, build_dir)
+    with ndk.ext.os.cd(build_dir):
+        build_cmd = ['bash', 'build.sh'] + _get_jobs_args() + ndk_build_flags
+        test_env = dict(os.environ)
+        test_env['NDK'] = ndk_path
+        if abi is not None:
+            test_env['APP_ABI'] = abi
+        test_env['APP_PLATFORM'] = f'android-{platform}'
+        test_env['APP_LD'] = linker.value
+        rc, out = ndk.ext.subprocess.call_output(
+            build_cmd, env=test_env, encoding='utf-8')
+        if rc == 0:
+            return Success(test)
+        else:
+            return Failure(test, out)
+
+
+def _platform_from_application_mk(test_dir: str) -> Optional[int]:
+    """Determine target API level from a test's Application.mk.
+
+    Args:
+        test_dir: Directory of the test to read.
+
+    Returns:
+        Integer portion of APP_PLATFORM if found, else None.
+
+    Raises:
+        ValueError: Found an unexpected value for APP_PLATFORM.
+    """
+    application_mk = os.path.join(test_dir, 'jni/Application.mk')
+    if not os.path.exists(application_mk):
+        return None
+
+    with open(application_mk) as application_mk_file:
+        for line in application_mk_file:
+            if line.startswith('APP_PLATFORM'):
+                _, platform_str = line.split(':=')
+                break
+        else:
+            return None
+
+    platform_str = platform_str.strip()
+    if not platform_str.startswith('android-'):
+        raise ValueError(platform_str)
+
+    _, api_level_str = platform_str.split('-')
+    return int(api_level_str)
+
+
+def _get_or_infer_app_platform(platform_from_user: Optional[int],
+                               test_dir: str, abi: Abi) -> int:
+    """Determines the platform level to use for a test using ndk-build.
+
+    Choose the platform level from, in order of preference:
+    1. Value given as argument.
+    2. APP_PLATFORM from jni/Application.mk.
+    3. Default value for the target ABI.
+
+    Args:
+        platform_from_user: A user provided platform level or None.
+        test_dir: The directory containing the ndk-build project.
+        abi: The ABI being targeted.
+
+    Returns:
+        The platform version the test should build against.
+    """
+    if platform_from_user is not None:
+        return platform_from_user
+
+    minimum_version = ndk.abis.min_api_for_abi(abi)
+    platform_from_application_mk = _platform_from_application_mk(test_dir)
+    if platform_from_application_mk is not None:
+        if platform_from_application_mk >= minimum_version:
+            return platform_from_application_mk
+
+    return minimum_version
+
+
+class NdkBuildTest(BuildTest):
+    def __init__(self, name: str, test_dir: str, config: BuildConfiguration,
+                 ndk_path: str, dist: bool) -> None:
+        api = _get_or_infer_app_platform(config.api, test_dir, config.abi)
+        config = ndk.test.spec.BuildConfiguration(config.abi, api,
+                                                  config.linker)
+        super().__init__(name, test_dir, config, ndk_path)
+        self.dist = dist
+
+    def get_dist_dir(self, obj_dir: str, dist_dir: str) -> str:
+        if self.dist:
+            return self.get_build_dir(dist_dir)
+        else:
+            return os.path.join(self.get_build_dir(obj_dir), 'dist')
+
+    def get_build_dir(self, out_dir: str) -> str:
+        return os.path.join(out_dir, str(self.config), 'ndk-build', self.name)
+
+    def run(self, obj_dir: str, dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        logger().info('Building test: %s', self.name)
+        obj_dir = self.get_build_dir(obj_dir)
+        dist_dir = self.get_dist_dir(obj_dir, dist_dir)
+        assert self.api is not None
+        result = _run_ndk_build_test(self, obj_dir, dist_dir, self.test_dir,
+                                     self.ndk_path, self.ndk_build_flags,
+                                     self.abi, self.api, self.config.linker)
+        return result, []
+
+
+def _run_ndk_build_test(test: NdkBuildTest, obj_dir: str, dist_dir: str,
+                        test_dir: str, ndk_path: str,
+                        ndk_build_flags: List[str], abi: Abi,
+                        platform: int, linker: LinkerOption) -> TestResult:
+    _prep_build_dir(test_dir, obj_dir)
+    with ndk.ext.os.cd(obj_dir):
+        args = [
+            f'APP_ABI={abi}',
+            f'APP_PLATFORM=android-{platform}',
+            f'APP_LD={linker.value}',
+            f'NDK_LIBS_OUT={dist_dir}',
+        ] + _get_jobs_args()
+        rc, out = ndk.ndkbuild.build(ndk_path, args + ndk_build_flags)
+        if rc == 0:
+            return Success(test)
+        else:
+            return Failure(test, out)
+
+
+class CMakeBuildTest(BuildTest):
+    def __init__(self, name: str, test_dir: str, config: BuildConfiguration,
+                 ndk_path: str, dist: bool) -> None:
+        api = _get_or_infer_app_platform(config.api, test_dir, config.abi)
+        config = ndk.test.spec.BuildConfiguration(config.abi, api,
+                                                  config.linker)
+        super().__init__(name, test_dir, config, ndk_path)
+        self.dist = dist
+
+    def get_dist_dir(self, obj_dir: str, dist_dir: str) -> str:
+        if self.dist:
+            return self.get_build_dir(dist_dir)
+        else:
+            return os.path.join(self.get_build_dir(obj_dir), 'dist')
+
+    def get_build_dir(self, out_dir: str) -> str:
+        return os.path.join(out_dir, str(self.config), 'cmake', self.name)
+
+    def run(self, obj_dir: str, dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        obj_dir = self.get_build_dir(obj_dir)
+        dist_dir = self.get_dist_dir(obj_dir, dist_dir)
+        logger().info('Building test: %s', self.name)
+        assert self.api is not None
+        result = _run_cmake_build_test(self, obj_dir, dist_dir, self.test_dir,
+                                       self.ndk_path, self.cmake_flags,
+                                       self.abi, self.api, self.config.linker)
+        return result, []
+
+
+def _run_cmake_build_test(test: CMakeBuildTest, obj_dir: str, dist_dir: str,
+                          test_dir: str, ndk_path: str, cmake_flags: List[str],
+                          abi: str, platform: int,
+                          linker: LinkerOption) -> TestResult:
+    _prep_build_dir(test_dir, obj_dir)
+
+    # Add prebuilts to PATH.
+    prebuilts_host_tag = ndk.hosts.get_default_host().value + '-x86'
+    prebuilts_bin = ndk.paths.android_path(
+        'prebuilts', 'cmake', prebuilts_host_tag, 'bin')
+    env_path = prebuilts_bin + os.pathsep + os.environ['PATH']
+
+    # Fail if we don't have a working cmake executable, either from the
+    # prebuilts, or from the SDK, or if a new enough version is installed.
+    cmake_bin = shutil.which('cmake', path=env_path)
+    if cmake_bin is None:
+        return Failure(test, 'cmake executable not found')
+
+    out = subprocess.check_output([cmake_bin, '--version']).decode('utf-8')
+    version_pattern = r'cmake version (\d+)\.(\d+)\.'
+    m = re.match(version_pattern, out)
+    if m is None:
+        raise RuntimeError('Unable to determine CMake version.')
+    version = [int(v) for v in m.groups()]
+    if version < [3, 6]:
+        return Failure(test, 'cmake 3.6 or above required')
+
+    # Also require a working ninja executable.
+    ninja_bin = shutil.which('ninja', path=env_path)
+    if ninja_bin is None:
+        return Failure(test, 'ninja executable not found')
+    rc, _ = ndk.ext.subprocess.call_output([ninja_bin, '--version'])
+    if rc != 0:
+        return Failure(test, 'ninja --version failed')
+
+    toolchain_file = os.path.join(ndk_path, 'build', 'cmake',
+                                  'android.toolchain.cmake')
+    abi_obj_dir = os.path.join(obj_dir, abi)
+    abi_lib_dir = os.path.join(dist_dir, abi)
+    args = [
+        f'-H{obj_dir}',
+        f'-B{abi_obj_dir}',
+        f'-DCMAKE_TOOLCHAIN_FILE={toolchain_file}',
+        f'-DANDROID_ABI={abi}',
+        f'-DANDROID_LD={linker.value}',
+        f'-DCMAKE_RUNTIME_OUTPUT_DIRECTORY={abi_lib_dir}',
+        f'-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={abi_lib_dir}',
+        '-GNinja',
+        f'-DCMAKE_MAKE_PROGRAM={ninja_bin}',
+    ]
+    if platform is not None:
+        args.append('-DANDROID_PLATFORM=android-{}'.format(platform))
+    rc, out = ndk.ext.subprocess.call_output(
+        [cmake_bin] + cmake_flags + args, encoding='utf-8')
+    if rc != 0:
+        return Failure(test, out)
+    rc, out = ndk.ext.subprocess.call_output(
+        [cmake_bin, '--build', abi_obj_dir, '--'] + _get_jobs_args(),
+        encoding='utf-8')
+    if rc != 0:
+        return Failure(test, out)
+    return Success(test)
+
+
+def get_xunit_reports(xunit_file: Path, test_base_dir: str,
+                      config: BuildConfiguration, ndk_path: str) -> List[Test]:
+    tree = xml.etree.ElementTree.parse(str(xunit_file))
+    root = tree.getroot()
+    cases = root.findall('.//testcase')
+
+    reports: List[Test] = []
+    for test_case in cases:
+        mangled_test_dir = test_case.get('classname')
+
+        # The classname is the path from the root of the libc++ test directory
+        # to the directory containing the test (prefixed with 'libc++.')...
+        mangled_path = '/'.join([mangled_test_dir, test_case.get('name')])
+
+        # ... that has had '.' in its path replaced with '_' because xunit.
+        test_matches = find_original_libcxx_test(mangled_path)
+        if not test_matches:
+            raise RuntimeError('Found no matches for test ' + mangled_path)
+        if len(test_matches) > 1:
+            raise RuntimeError('Found multiple matches for test {}: {}'.format(
+                mangled_path, test_matches))
+        assert len(test_matches) == 1
+
+        # We found a unique path matching the xunit class/test name.
+        name = test_matches[0]
+        test_dir = os.path.dirname(name)[len('libc++.'):]
+
+        failure_nodes = test_case.findall('failure')
+        if not failure_nodes:
+            reports.append(XunitSuccess(
+                name, test_base_dir, test_dir, config, ndk_path))
+            continue
+
+        if len(failure_nodes) != 1:
+            msg = ('Could not parse XUnit output: test case does not have a '
+                   'unique failure node: {}'.format(name))
+            raise RuntimeError(msg)
+
+        failure_node = failure_nodes[0]
+        failure_text = failure_node.text
+        assert failure_text is not None
+        reports.append(XunitFailure(
+            name, test_base_dir, test_dir, failure_text, config, ndk_path))
+    return reports
+
+
+def get_lit_cmd() -> Optional[List[str]]:
+    # The build server doesn't install lit to a virtualenv, so use it from the
+    # source location if possible.
+    lit_path = ndk.paths.android_path('external/llvm/utils/lit/lit.py')
+    if os.path.exists(lit_path):
+        return ['python', lit_path]
+    elif shutil.which('lit'):
+        return ['lit']
+    return None
+
+
+def find_original_libcxx_test(name: str) -> List[str]:
+    """Finds the original libc++ test file given the xunit test name.
+
+    LIT mangles test names to replace all periods with underscores because
+    xunit. This returns all tests that could possibly match the xunit test
+    name.
+    """
+
+    name = ndk.paths.to_posix_path(name)
+
+    # LIT special cases tests in the root of the test directory (such as
+    # test/nothing_to_do.pass.cpp) as "libc++.libc++/$TEST_FILE.pass.cpp" for
+    # some reason. Strip it off so we can find the tests.
+    if name.startswith('libc++.libc++/'):
+        name = 'libc++.' + name[len('libc++.libc++/'):]
+
+    test_prefix = 'libc++.'
+    if not name.startswith(test_prefix):
+        raise ValueError('libc++ test name must begin with "libc++."')
+
+    name = name[len(test_prefix):]
+    test_pattern = name.replace('_', '?')
+    matches = []
+
+    # On Windows, a multiprocessing worker process does not inherit ALL_TESTS,
+    # so we must scan libc++ tests in each worker.
+
+    # ndk.test.scanner is not explicitly imported, which messes with mypy, but
+    # works. We can't add the import because then there's a cyclic dependency
+    # between this module and ndk.test.scanner. We'll need to refactor to fix
+    # that.
+    ndk.test.scanner.LibcxxTestScanner.find_all_libcxx_tests()  # type: ignore
+
+    all_libcxx_tests = ndk.test.scanner.LibcxxTestScanner.ALL_TESTS  # type: ignore
+    for match in fnmatch.filter(all_libcxx_tests, test_pattern):
+        matches.append(test_prefix + match)
+    return matches
+
+
+class LibcxxTest(Test):
+    def __init__(self, name: str, test_dir: str, config: BuildConfiguration,
+                 ndk_path: str) -> None:
+        if config.api is None:
+            config.api = ndk.abis.min_api_for_abi(config.abi)
+
+        super().__init__(name, test_dir, config, ndk_path)
+
+    @property
+    def abi(self) -> Abi:
+        return self.config.abi
+
+    @property
+    def api(self) -> Optional[int]:
+        return self.config.api
+
+    def get_build_dir(self, out_dir: str) -> str:
+        return os.path.join(out_dir, str(self.config), 'libcxx', self.name)
+
+    def run_lit(self, lit: List[str], ndk_path: Path, libcxx_src: Path,
+                libcxx_install: Path, build_dir: str,
+                filters: List[str]) -> None:
+        device_dir = '/data/local/tmp/libcxx'
+
+        arch = ndk.abis.abi_to_arch(self.abi)
+        host_tag = ndk.hosts.get_host_tag(self.ndk_path)
+        triple = ndk.abis.arch_to_triple(arch)
+        toolchain = ndk.abis.arch_to_toolchain(arch)
+
+        replacements = [
+            ('abi', self.abi),
+            ('api', self.api),
+            ('arch', arch),
+            ('host_tag', host_tag),
+            ('libcxx_install', libcxx_install),
+            ('libcxx_src', libcxx_src),
+            ('linker', self.config.linker.value),
+            ('ndk_path', ndk_path),
+            ('toolchain', toolchain),
+            ('triple', f'{triple}{self.api}'),
+            ('build_dir', build_dir),
+        ]
+        lit_cfg_args = []
+        for key, value in replacements:
+            lit_cfg_args.append(f'--param={key}={value}')
+
+        xunit_output = os.path.join(build_dir, 'xunit.xml')
+
+        lit_args = lit + [
+            '-sv',
+            '--param=device_dir=' + device_dir,
+            '--param=build_only=True',
+            '--no-progress-bar',
+            '--show-all',
+            '--xunit-xml-output=' + xunit_output,
+        ] + lit_cfg_args
+
+        default_test_path = os.path.join(libcxx_src, 'test')
+        test_paths = list(filters)
+        if not test_paths:
+            test_paths.append(default_test_path)
+        for test_path in test_paths:
+            lit_args.append(test_path)
+
+        # Ignore the exit code. We do most XFAIL processing outside the test
+        # runner so expected failures in the test runner will still cause a
+        # non-zero exit status. This "test" only fails if we encounter a Python
+        # exception. Exceptions raised from our code are already caught by the
+        # test runner. If that happens in LIT, the xunit output will not be
+        # valid and we'll fail get_xunit_reports and raise an exception anyway.
+        with open(os.devnull, 'w') as dev_null:
+            stdout: Optional[TextIO] = dev_null
+            stderr: Optional[TextIO] = dev_null
+            if logger().isEnabledFor(logging.INFO):
+                stdout = None
+                stderr = None
+            subprocess.call(lit_args, stdout=stdout, stderr=stderr)
+
+    def run(self, obj_dir: str, dist_dir: str,
+            test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        lit = get_lit_cmd()
+        if lit is None:
+            return Failure(self, 'Could not find lit'), []
+
+        libcxx_src = ndk.paths.ANDROID_DIR / 'external/libcxx'
+        if not libcxx_src.exists():
+            return Failure(self,
+                           f'Expected libc++ directory at {libcxx_src}'), []
+
+        build_dir = self.get_build_dir(dist_dir)
+
+        if not os.path.exists(build_dir):
+            os.makedirs(build_dir)
+
+        xunit_output = Path(build_dir) / 'xunit.xml'
+        libcxx_test_path = libcxx_src / 'test'
+        ndk_path = Path(self.ndk_path)
+        libcxx_install = (ndk_path / 'sources/cxx-stl/llvm-libc++' / 'libs' /
+                          str(self.config.abi))
+        libcxx_so_path = libcxx_install / 'libc++_shared.so'
+        shutil.copy2(str(libcxx_so_path), build_dir)
+
+        # The libc++ test runner's filters are path based. Assemble the path to
+        # the test based on the late_filters (early filters for a libc++ test
+        # would be simply "libc++", so that's not interesting at this stage).
+        filters = []
+        for late_filter in test_filters.late_filters:
+            filter_pattern = late_filter.pattern
+            if not filter_pattern.startswith('libc++.'):
+                continue
+
+            _, _, path = filter_pattern.partition('.')
+            if not os.path.isabs(path):
+                path = os.path.join(libcxx_test_path, path)
+
+            # If we have a filter like "libc++.std", we'll run everything in
+            # std, but all our XunitReport "tests" will be filtered out.  Make
+            # sure we have something usable.
+            if path.endswith('*'):
+                # But the libc++ test runner won't like that, so strip it.
+                path = path[:-1]
+            elif not os.path.isfile(path):
+                raise RuntimeError(f'{path} does not exist')
+
+            filters.append(path)
+        self.run_lit(lit, ndk_path, libcxx_src, libcxx_install, build_dir,
+                     filters)
+
+        for root, _, files in os.walk(libcxx_test_path):
+            for test_file in files:
+                if not test_file.endswith('.dat'):
+                    continue
+                test_relpath = os.path.relpath(root, libcxx_test_path)
+                dest_dir = os.path.join(build_dir, test_relpath)
+                if not os.path.exists(dest_dir):
+                    continue
+
+                shutil.copy2(os.path.join(root, test_file), dest_dir)
+
+        # We create a bunch of fake tests that report the status of each
+        # individual test in the xunit report.
+        test_reports = get_xunit_reports(
+            xunit_output, self.test_dir, self.config, self.ndk_path)
+
+        return Success(self), test_reports
+
+    # pylint: disable=no-self-use
+    def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
+        # Actual results are reported individually by pulling them out of the
+        # xunit output. This just reports the status of the overall test run,
+        # which should be passing.
+        return None, None
+
+    def check_unsupported(self) -> Optional[str]:
+        return None
+
+    def is_negative_test(self) -> bool:
+        return False
+    # pylint: enable=no-self-use
+
+
+class XunitResult(Test):
+    """Fake tests so we can show a result for each libc++ test.
+
+    We create these by parsing the xunit XML output from the libc++ test
+    runner. For each result, we create an XunitResult "test" that simply
+    returns a result for the xunit status.
+
+    We don't have an ExpectedFailure form of the XunitResult because that is
+    already handled for us by the libc++ test runner.
+    """
+
+    def __init__(self, name: str, test_base_dir: str, test_dir: str,
+                 config: BuildConfiguration, ndk_path: str) -> None:
+        super().__init__(name, test_dir, config, ndk_path)
+        self.test_base_dir = test_base_dir
+
+    @property
+    def case_name(self) -> str:
+        return os.path.splitext(os.path.basename(self.name))[0]
+
+    def run(self, _out_dir: str, _dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        raise NotImplementedError
+
+    def get_test_config(self) -> TestConfig:
+        test_config_dir = os.path.join(self.test_base_dir, self.test_dir)
+        return LibcxxTestConfig.from_test_dir(test_config_dir)
+
+    def check_broken(self) -> Union[Tuple[None, None], Tuple[str, str]]:
+        config, bug = self.get_test_config().build_broken(self)
+        if config is not None:
+            return config, bug
+        return None, None
+
+    # pylint: disable=no-self-use
+    def check_unsupported(self) -> Optional[str]:
+        return None
+
+    def is_negative_test(self) -> bool:
+        return False
+    # pylint: enable=no-self-use
+
+
+class XunitSuccess(XunitResult):
+    def get_build_dir(self, out_dir: str) -> str:
+        raise NotImplementedError
+
+    def run(self, _out_dir: str, _dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        return Success(self), []
+
+
+class XunitFailure(XunitResult):
+    def __init__(self, name: str, test_base_dir: str, test_dir: str, text: str,
+                 config: BuildConfiguration, ndk_path: str) -> None:
+        super().__init__(name, test_base_dir, test_dir, config, ndk_path)
+        self.text = text
+
+    def get_build_dir(self, out_dir: str) -> str:
+        raise NotImplementedError
+
+    def run(self, _out_dir: str, _dist_dir: str,
+            _test_filters: TestFilter) -> Tuple[TestResult, List[Test]]:
+        return Failure(self, self.text), []
diff --git a/ndk/test/ui.py b/ndk/test/ui.py
index 5694d6e..212a2ec 100644
--- a/ndk/test/ui.py
+++ b/ndk/test/ui.py
@@ -14,27 +14,24 @@
 # limitations under the License.
 #
 """UI classes for test output."""
-from __future__ import absolute_import, print_function
+from __future__ import absolute_import
+from __future__ import print_function
 
 import os
-from typing import Any, List
+from typing import List
 
-from ndk.ansi import Console, font_bold, font_faint, font_reset
-from ndk.test.devices import Device
-from ndk.ui import AnsiUiRenderer, NonAnsiUiRenderer, Ui, UiRenderer
-from ndk.workqueue import ShardingWorkQueue, Worker
+from ndk.ansi import AnsiConsole, Console, font_bold, font_faint, font_reset
+from ndk.ui import Ui, UiRenderer, AnsiUiRenderer, DumbUiRenderer, columnate
+from ndk.test.devices import DeviceShardingGroup
+from ndk.workqueue import LoadRestrictingWorkQueue, ShardingWorkQueue, Worker
 
 
 class TestProgressUi(Ui):
     NUM_TESTS_DIGITS = 6
 
-    def __init__(
-        self,
-        ui_renderer: UiRenderer,
-        show_worker_status: bool,
-        show_device_groups: bool,
-        workqueue: ShardingWorkQueue[Any, Device],
-    ) -> None:
+    def __init__(self, ui_renderer: UiRenderer, show_worker_status: bool,
+                 show_device_groups: bool,
+                 workqueue: ShardingWorkQueue) -> None:
         super().__init__(ui_renderer)
         self.show_worker_status = show_worker_status
         self.show_device_groups = show_device_groups
@@ -47,51 +44,90 @@
             for group, group_queues in self.workqueue.work_queues.items():
                 for device, work_queue in group_queues.items():
                     style = font_bold()
-                    if all(w.status == Worker.IDLE_STATUS for w in work_queue.workers):
+                    if all([
+                            w.status == Worker.IDLE_STATUS
+                            for w in work_queue.workers
+                    ]):
                         style = font_faint()
-                    lines.append(f"{style}{device}{font_reset()}")
+                    lines.append(f'{style}{device}{font_reset()}')
                     for worker in work_queue.workers:
-                        style = ""
+                        style = ''
                         if worker.status == Worker.IDLE_STATUS:
                             style = font_faint()
-                        lines.append(f"  {style}{worker.status}{font_reset()}")
+                        lines.append(f'  {style}{worker.status}{font_reset()}')
 
-        lines.append(
-            "{: >{width}} tests remaining".format(
-                self.workqueue.num_tasks, width=self.NUM_TESTS_DIGITS
-            )
-        )
+        lines.append('{: >{width}} tests remaining'.format(
+            self.workqueue.num_tasks, width=self.NUM_TESTS_DIGITS))
 
         if self.show_device_groups:
-            for group in sorted(self.workqueue.task_queues.keys(), key=str):
-                group_id = f"{len(group.shards)} devices {group}"
-                lines.append(
-                    "{: >{width}} {}".format(
-                        self.workqueue.task_queues[group].qsize(),
-                        group_id,
-                        width=self.NUM_TESTS_DIGITS,
-                    )
-                )
+            for group in sorted(self.workqueue.task_queues.keys()):
+                assert isinstance(group, DeviceShardingGroup)
+                group_id = f'{len(group.devices)} devices {group}'
+                lines.append('{: >{width}} {}'.format(
+                    self.workqueue.task_queues[group].qsize(), group_id,
+                    width=self.NUM_TESTS_DIGITS))
 
         return lines
 
 
-def get_test_progress_ui(
-    console: Console, workqueue: ShardingWorkQueue[Any, Device]
-) -> TestProgressUi:
+def get_test_progress_ui(console: Console,
+                         workqueue: ShardingWorkQueue) -> TestProgressUi:
     ui_renderer: UiRenderer
     if console.smart_console:
         ui_renderer = AnsiUiRenderer(console)
         show_worker_status = True
         show_device_groups = True
-    elif os.name == "nt":
-        ui_renderer = NonAnsiUiRenderer(console)
+    elif os.name == 'nt':
+        ui_renderer = DumbUiRenderer(console)
         show_worker_status = False
         show_device_groups = False
     else:
-        ui_renderer = NonAnsiUiRenderer(console)
+        ui_renderer = DumbUiRenderer(console)
         show_worker_status = False
         show_device_groups = True
     return TestProgressUi(
-        ui_renderer, show_worker_status, show_device_groups, workqueue
-    )
+        ui_renderer, show_worker_status, show_device_groups, workqueue)
+
+
+class TestBuildProgressUi(Ui):
+    NUM_TESTS_DIGITS = 6
+
+    def __init__(self, ui_renderer: UiRenderer, show_worker_status: bool,
+                 workqueue: LoadRestrictingWorkQueue):
+        super().__init__(ui_renderer)
+        self.show_worker_status = show_worker_status
+        self.workqueue = workqueue
+
+    def get_ui_lines(self) -> List[str]:
+        lines = []
+
+        if self.show_worker_status:
+            for worker in self.workqueue.main_work_queue.workers:
+                lines.append(worker.status)
+            for worker in self.workqueue.restricted_work_queue.workers:
+                lines.append(worker.status)
+
+        if self.ui_renderer.console.smart_console:
+            assert isinstance(self.ui_renderer.console, AnsiConsole)
+            # Keep some space at the top of the UI so we can see messages.
+            ui_height = self.ui_renderer.console.height - 10
+            if ui_height > 0:
+                lines = columnate(lines, self.ui_renderer.console.width,
+                                  ui_height)
+
+        lines.append('{: >{width}} tests remaining'.format(
+            self.workqueue.num_tasks, width=self.NUM_TESTS_DIGITS))
+        return lines
+
+
+def get_test_build_progress_ui(
+        console: Console,
+        workqueue: LoadRestrictingWorkQueue) -> TestBuildProgressUi:
+    ui_renderer: UiRenderer
+    if console.smart_console:
+        ui_renderer = AnsiUiRenderer(console)
+        show_worker_status = True
+    else:
+        ui_renderer = DumbUiRenderer(console)
+        show_worker_status = False
+    return TestBuildProgressUi(ui_renderer, show_worker_status, workqueue)
diff --git a/ndk/test_deps.py b/ndk/test_deps.py
index 5450f8b..ad0fc32 100644
--- a/ndk/test_deps.py
+++ b/ndk/test_deps.py
@@ -14,16 +14,16 @@
 # limitations under the License.
 #
 """Test for ndk.deps."""
-import unittest
 from typing import Set
+import unittest
 
+from ndk.deps import CyclicDependencyError
+from ndk.deps import DependencyManager
 from ndk.builds import Module
-from ndk.deps import CyclicDependencyError, DependencyManager
 
 
-class MockModule(Module):
+class DummyModule(Module):
     """A no-op module base."""
-
     def validate(self) -> None:
         pass
 
@@ -36,66 +36,68 @@
 
 # A basic cycle. The cycle logic is tested more thoroughly in test_graph.py,
 # but we want to ensure that CyclicDependencyError is formatted nicely.
-class CycleA(MockModule):
-    name = "cycleA"
-    deps = {"cycleB"}
+class CycleA(DummyModule):
+    name = 'cycleA'
+    deps = {'cycleB'}
 
 
-class CycleB(MockModule):
-    name = "cycleB"
-    deps = {"cycleA"}
+class CycleB(DummyModule):
+    name = 'cycleB'
+    deps = {'cycleA'}
 
 
 # A module with no dependents or dependencies. Should be immediately buildable.
-class Isolated(MockModule):
-    name = "isolated"
+class Isolated(DummyModule):
+    name = 'isolated'
     deps: Set[str] = set()
 
 
 # A module that is not present in the build graph.
-class Unknown(MockModule):
-    name = "unknown"
+class Unknown(DummyModule):
+    name = 'unknown'
     deps: Set[str] = set()
 
 
 # A simple chain of two modules. The first should be immediately buildable, and
 # the second should become buildable after it completes.
-class SimpleA(MockModule):
-    name = "simpleA"
+class SimpleA(DummyModule):
+    name = 'simpleA'
     deps: Set[str] = set()
 
 
-class SimpleB(MockModule):
-    name = "simpleB"
-    deps = {"simpleA"}
+class SimpleB(DummyModule):
+    name = 'simpleB'
+    deps = {'simpleA'}
 
 
 # Slightly more complex module graph.
-class ComplexA(MockModule):
-    name = "complexA"
+class ComplexA(DummyModule):
+    name = 'complexA'
     deps: Set[str] = set()
 
 
-class ComplexB(MockModule):
-    name = "complexB"
-    deps = {"complexA"}
+class ComplexB(DummyModule):
+    name = 'complexB'
+    deps = {'complexA'}
 
 
-class ComplexC(MockModule):
-    name = "complexC"
-    deps = {"complexA"}
+class ComplexC(DummyModule):
+    name = 'complexC'
+    deps = {'complexA'}
 
 
-class ComplexD(MockModule):
-    name = "complexD"
-    deps = {"complexA", "complexB"}
+class ComplexD(DummyModule):
+    name = 'complexD'
+    deps = {'complexA', 'complexB'}
 
 
 class DependencyManagerTest(unittest.TestCase):
     def test_cyclic_dependency_message(self) -> None:
         """Test that a cycle raises the proper exception."""
-        pattern = "^Detected cyclic dependency: cycleA -> cycleB -> cycleA$"
-        with self.assertRaisesRegex(CyclicDependencyError, pattern):
+        pattern = '^Detected cyclic dependency: cycleA -> cycleB -> cycleA$'
+        # pylint: disable=deprecated-method
+        # https://github.com/PyCQA/pylint/issues/1946
+        with self.assertRaisesRegexp(CyclicDependencyError, pattern):
             DependencyManager([CycleA(), CycleB()])
 
     def test_empty_raises(self) -> None:
@@ -144,8 +146,7 @@
         complexD = ComplexD()
         deps = DependencyManager([complexA, complexB, complexC, complexD])
         self.assertSetEqual(
-            {complexB, complexC, complexD}, set(deps.blocked_modules.keys())
-        )
+            {complexB, complexC, complexD}, set(deps.blocked_modules.keys()))
         self.assertSetEqual({complexA}, deps.buildable_modules)
         self.assertSetEqual({complexA}, deps.get_buildable())
         self.assertSetEqual(set(), deps.buildable_modules)
diff --git a/ndk/test_graph.py b/ndk/test_graph.py
index a83ddd0..b3ab75a 100644
--- a/ndk/test_graph.py
+++ b/ndk/test_graph.py
@@ -14,8 +14,8 @@
 # limitations under the License.
 #
 """Test for ndk.graph."""
+from typing import cast, List, Optional
 import unittest
-from typing import List, Optional, cast
 
 import ndk.graph
 
@@ -63,18 +63,18 @@
 
     def test_self_cyclic(self) -> None:
         """Test that a cycle is found in a self-cyclic module."""
-        self.cycle_test(["AA"], "AA")
+        self.cycle_test(['AA'], 'AA')
 
     def test_no_source_raises(self) -> None:
         """Test that a cycle is found in a graph with no source."""
-        self.cycle_test(["ABCA"], "ABCA")
+        self.cycle_test(['ABCA'], 'ABCA')
 
     def test_find_cycle(self) -> None:
         """Test that cycles can be found."""
-        self.cycle_test(["ABCDB"], "BCDB")
-        self.cycle_test(["ABCB", "BD"], "BCB")
-        self.cycle_test(["CBA", "CDC"], "CDC")
+        self.cycle_test(['ABCDB'], 'BCDB')
+        self.cycle_test(['ABCB', 'BD'], 'BCB')
+        self.cycle_test(['CBA', 'CDC'], 'CDC')
 
     def test_no_cycle(self) -> None:
         """Test that None is returned when there is no cycle."""
-        self.assertIsNone(cycle_test(["ABCD", "CEF"]))
+        self.assertIsNone(cycle_test(['ABCD', 'CEF']))
diff --git a/ndk/test_ndkversionheadergenerator.py b/ndk/test_ndkversionheadergenerator.py
deleted file mode 100644
index 9811536..0000000
--- a/ndk/test_ndkversionheadergenerator.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from pathlib import Path
-
-from .ndkversionheadergenerator import NdkVersionHeaderGenerator
-
-
-def test_ndkversionheadergenerator_generate_str() -> None:
-    text = NdkVersionHeaderGenerator(
-        major=26, minor=0, beta=0, build_number=1234, canary=False
-    ).generate_str()
-    lines = text.splitlines()
-    assert "#define __NDK_MAJOR__ 26" in lines
-    assert "#define __NDK_MINOR__ 0" in lines
-    assert "#define __NDK_BETA__ 0" in lines
-    assert "#define __NDK_BUILD__ 1234" in lines
-    assert "#define __NDK_CANARY__ 0" in lines
-
-    text = NdkVersionHeaderGenerator(
-        major=27, minor=1, beta=2, build_number=0, canary=True
-    ).generate_str()
-    lines = text.splitlines()
-    assert "#define __NDK_MAJOR__ 27" in lines
-    assert "#define __NDK_MINOR__ 1" in lines
-    assert "#define __NDK_BETA__ 2" in lines
-    assert "#define __NDK_BUILD__ 0" in lines
-    assert "#define __NDK_CANARY__ 1" in lines
-
-
-def test_ndkversionheader_write(tmp_path: Path) -> None:
-    generator = NdkVersionHeaderGenerator(
-        major=26, minor=0, beta=0, build_number=1234, canary=False
-    )
-    text = generator.generate_str()
-    output = tmp_path / "ndk-version.h"
-    generator.write(output)
-    assert text == output.read_text()
diff --git a/ndk/testing/builders.py b/ndk/testing/builders.py
deleted file mode 100644
index 01bc062..0000000
--- a/ndk/testing/builders.py
+++ /dev/null
@@ -1,175 +0,0 @@
-#
-# Copyright (C) 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Tools for building test projects with CMake and ndk-build."""
-from __future__ import annotations
-
-import shutil
-import subprocess
-from pathlib import Path
-
-from ndk.abis import Abi
-from ndk.hosts import Host
-from ndk.paths import ANDROID_DIR
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
-
-
-class CMakeBuilder:
-    """Builds a CMake project in the given test configuration."""
-
-    def __init__(
-        self,
-        project: Path,
-        ndk: Path,
-        abi: Abi,
-        min_sdk_version: int,
-        toolchain_mode: CMakeToolchainFile,
-        cmake_flags: list[str] | None = None,
-    ) -> None:
-        self.project = project
-        self.ndk = ndk
-        self.abi = abi
-        self.min_sdk_version = min_sdk_version
-        if toolchain_mode is CMakeToolchainFile.Legacy:
-            self.toolchain_mode = "ON"
-        else:
-            self.toolchain_mode = "OFF"
-        if cmake_flags is None:
-            cmake_flags = []
-        self.cmake_flags = cmake_flags
-
-        # PythonBuildTest ensures that we're cd'd into the test out directory.
-        self.out_dir = Path("build")
-
-    @staticmethod
-    def from_build_config(
-        project: Path,
-        ndk: Path,
-        build_config: BuildConfiguration,
-        cmake_build_flags: list[str] | None = None,
-    ) -> CMakeBuilder:
-        assert build_config.api is not None
-        return CMakeBuilder(
-            project,
-            ndk,
-            build_config.abi,
-            build_config.api,
-            build_config.toolchain_file,
-            cmake_build_flags,
-        )
-
-    def build(self) -> str:
-        """Configures and runs the build.
-
-        stdout and stderr will be merged and returned if both stages succeed. If either
-        fails, subprocess.CalledProcessError will be thrown and the stdout property will
-        contain the merged output.
-        """
-        host = Host.current()
-        if host == Host.Windows64:
-            tag = "windows-x86"
-        else:
-            tag = f"{host.value}-x86"
-        cmake = ANDROID_DIR / f"prebuilts/cmake/{tag}/bin/cmake"
-        ninja = ANDROID_DIR / f"prebuilts/ninja/{tag}/ninja"
-        if host == Host.Windows64:
-            cmake = cmake.with_suffix(".exe")
-            ninja = ninja.with_suffix(".exe")
-        if self.out_dir.exists():
-            shutil.rmtree(self.out_dir)
-        self.out_dir.mkdir(parents=True)
-        toolchain_file = self.ndk / "build/cmake/android.toolchain.cmake"
-        cmd = [
-            str(cmake),
-            "-S",
-            str(self.project),
-            "-B",
-            str(self.out_dir),
-            f"-DCMAKE_TOOLCHAIN_FILE={toolchain_file}",
-            f"-DANDROID_ABI={self.abi}",
-            f"-DANDROID_PLATFORM=android-{self.min_sdk_version}",
-            f"-DANDROID_USE_LEGACY_TOOLCHAIN_FILE={self.toolchain_mode}",
-            "-GNinja",
-            f"-DCMAKE_MAKE_PROGRAM={ninja}",
-        ] + self.cmake_flags
-        subprocess.run(
-            cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True
-        )
-        return subprocess.run(
-            [str(ninja), "-C", str(self.out_dir), "-v"],
-            check=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            text=True,
-        ).stdout
-
-
-class NdkBuildBuilder:
-    def __init__(
-        self,
-        project: Path,
-        ndk: Path,
-        abi: Abi,
-        min_sdk_version: int,
-        ndk_build_flags: list[str] | None = None,
-    ) -> None:
-        self.project = project
-        self.ndk = ndk
-        self.abi = abi
-        self.min_sdk_version = min_sdk_version
-        if ndk_build_flags is None:
-            ndk_build_flags = []
-        self.ndk_build_flags = ndk_build_flags
-        self.out_dir = self.project / "libs" / self.abi
-
-    @staticmethod
-    def from_build_config(
-        project: Path,
-        ndk: Path,
-        build_config: BuildConfiguration,
-        ndk_build_flags: list[str] | None = None,
-    ) -> NdkBuildBuilder:
-        assert build_config.api is not None
-        return NdkBuildBuilder(
-            project, ndk, build_config.abi, build_config.api, ndk_build_flags
-        )
-
-    def build(self) -> str:
-        """Runs the build.
-
-        stdout and stderr will be merged and returned if the build succeeds. If it
-        fails, subprocess.CalledProcessError will be thrown and the stdout property will
-        contain the merged output.
-        """
-        ndk_build = self.ndk / "ndk-build"
-        if Host.current() == Host.Windows64:
-            ndk_build = ndk_build.with_suffix(".cmd")
-
-        return subprocess.run(
-            [
-                str(ndk_build),
-                "-C",
-                str(self.project),
-                "-B",
-                "V=1",
-                f"APP_ABI={self.abi}",
-                f"APP_PLATFORM=android-{self.min_sdk_version}",
-            ]
-            + self.ndk_build_flags,
-            check=True,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            text=True,
-        ).stdout
diff --git a/ndk/testing/flag_verifier.py b/ndk/testing/flag_verifier.py
deleted file mode 100644
index 464b5e9..0000000
--- a/ndk/testing/flag_verifier.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Tools for verifying the presence or absence of flags in builds."""
-from __future__ import annotations
-
-import subprocess
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-
-from .builders import CMakeBuilder, NdkBuildBuilder
-
-
-class FlagVerifierResult:
-    """Base class for the result of FlagVerifier checks."""
-
-    def __init__(self, error_message: Optional[str]) -> None:
-        self.error_message = error_message
-
-    def failed(self) -> bool:
-        """Returns True if verification failed."""
-        raise NotImplementedError
-
-    def make_test_result_tuple(
-        self, message_prefix: str | None = None
-    ) -> tuple[bool, Optional[str]]:
-        """Creates a test result tuple in the format expect by run_test."""
-        if message_prefix is None:
-            message = self.error_message
-        else:
-            message = f"{message_prefix}\n{self.error_message}"
-        return not self.failed(), message
-
-
-class FlagVerifierSuccess(FlagVerifierResult):
-    """A successful flag verification result."""
-
-    def __init__(self) -> None:
-        super().__init__(error_message=None)
-
-    def failed(self) -> bool:
-        return False
-
-
-class FlagVerifierFailure(FlagVerifierResult):
-    """An unsuccessful flag verification result."""
-
-    def __init__(self, error_message: str) -> None:
-        super().__init__(error_message)
-
-    def failed(self) -> bool:
-        return True
-
-
-class FlagVerifier:
-    """Verifies that a build receives the expected flags."""
-
-    def __init__(
-        self, project: Path, ndk_path: Path, config: BuildConfiguration
-    ) -> None:
-        self.project = project
-        self.ndk_path = ndk_path
-        self.abi = config.abi
-        self.api = config.api
-        self.toolchain_mode = config.toolchain_file
-        self.expected_flags: list[str] = []
-        self.not_expected_flags: list[str] = []
-        self.ndk_build_flags: list[str] = []
-        self.cmake_flags: list[str] = []
-
-    def with_api(self, api: int) -> FlagVerifier:
-        self.api = api
-        return self
-
-    def with_ndk_build_flag(self, flag: str) -> FlagVerifier:
-        """Appends a flag to the list of arguments that will be passed to ndk-build."""
-        self.ndk_build_flags.append(flag)
-        return self
-
-    def with_cmake_flag(self, flag: str) -> FlagVerifier:
-        """Appends a flag to the list of arguments that will be passed to CMake."""
-        self.cmake_flags.append(flag)
-        return self
-
-    def expect_flag(self, flag: str) -> None:
-        """Verify that the given string is present in the build output.
-
-        Args:
-            flag: The literal string to search for in the output. Will be
-                  matched against whole whitespace-separated words in the
-                  output.
-        """
-        if flag in self.not_expected_flags:
-            raise ValueError(f"Flag {flag} both expected and not expected")
-        self.expected_flags.append(flag)
-
-    def expect_not_flag(self, flag: str) -> None:
-        """Verify that the given string is not present in the build output.
-
-        Args:
-            flag: The literal string to search for in the output. Will be
-                  matched against whole whitespace-separated words in the
-                  output.
-        """
-        if flag in self.expected_flags:
-            raise ValueError(f"Flag {flag} both expected and not expected")
-        self.not_expected_flags.append(flag)
-
-    def _check_output(self, output: str) -> FlagVerifierResult:
-        words = output.split(" ")
-        missing_flags: list[str] = []
-        wrong_flags: list[str] = []
-        for expected in self.expected_flags:
-            if expected not in words:
-                missing_flags.append(expected)
-        for not_expected in self.not_expected_flags:
-            if not_expected in words:
-                wrong_flags.append(not_expected)
-        if missing_flags:
-            return FlagVerifierFailure(
-                "Expected flags were not present in the build output: "
-                + ", ".join(missing_flags)
-                + f"\n{output}"
-            )
-        if wrong_flags:
-            return FlagVerifierFailure(
-                "Unexpected flags were present in the build output: "
-                + ", ".join(wrong_flags)
-                + f"\n{output}"
-            )
-        return FlagVerifierSuccess()
-
-    def verify(self) -> FlagVerifierResult:
-        """Verifies that both ndk-build and CMake behave as specified.
-
-        Returns:
-            A FlagVerifierResult object describing the verification result.
-        """
-        result = self.verify_cmake()
-        if result.failed():
-            return result
-        return self.verify_ndk_build()
-
-    def verify_ndk_build(self) -> FlagVerifierResult:
-        """Verifies that ndk-build behaves as specified.
-
-        Returns:
-            A FlagVerifierResult object describing the verification result.
-        """
-        try:
-            assert self.api is not None
-            output = NdkBuildBuilder(
-                self.project, self.ndk_path, self.abi, self.api, self.ndk_build_flags
-            ).build()
-        except subprocess.CalledProcessError as ex:
-            return FlagVerifierFailure(ex.stdout)
-
-        return self._check_output(output)
-
-    def verify_cmake(self) -> FlagVerifierResult:
-        """Verifies that CMake behaves as specified.
-
-        Returns:
-            A FlagVerifierResult object describing the verification result.
-        """
-        try:
-            assert self.api is not None
-            output = CMakeBuilder(
-                self.project,
-                self.ndk_path,
-                self.abi,
-                self.api,
-                self.toolchain_mode,
-                self.cmake_flags,
-            ).build()
-        except subprocess.CalledProcessError as ex:
-            return FlagVerifierFailure(ex.stdout)
-
-        return self._check_output(output)
diff --git a/ndk/testing/standalone_toolchain.py b/ndk/testing/standalone_toolchain.py
index 23052bf..6708d52 100644
--- a/ndk/testing/standalone_toolchain.py
+++ b/ndk/testing/standalone_toolchain.py
@@ -18,107 +18,85 @@
 import shutil
 import subprocess
 import tempfile
-import time
-from pathlib import Path
-from typing import Any
+from typing import Any, List, Tuple
 
 import ndk.abis
-import ndk.paths
-from ndk.hosts import Host
-from ndk.test.spec import BuildConfiguration
+from ndk.toolchains import LinkerOption
 
 
 def logger() -> logging.Logger:
     return logging.getLogger(__name__)
 
 
-def call_output(cmd: list[str], *args: Any, **kwargs: Any) -> tuple[int, Any]:
-    logger().info("COMMAND: %s", " ".join(cmd))
-    kwargs.update(
-        {
-            "stdout": subprocess.PIPE,
-            "stderr": subprocess.STDOUT,
-        }
-    )
-    with subprocess.Popen(cmd, *args, **kwargs) as proc:
-        out, _ = proc.communicate()
-        return proc.returncode, out
+def call_output(cmd: List[str], *args: Any, **kwargs: Any) -> Tuple[int, Any]:
+    logger().info('COMMAND: %s', ' '.join(cmd))
+    kwargs.update({
+        'stdout': subprocess.PIPE,
+        'stderr': subprocess.STDOUT,
+    })
+    proc = subprocess.Popen(cmd, *args, **kwargs)
+    out, _ = proc.communicate()
+    return proc.returncode, out
 
 
-def get_python_executable(ndk_path: Path) -> Path:
-    host = Host.current()
-    python_dir = ndk_path / "toolchains/llvm/prebuilt" / host.tag / "python3"
-    if host is Host.Windows64:
-        return python_dir / "python.exe"
-    return python_dir / "bin/python3"
+def make_standalone_toolchain(ndk_path: str, arch: str, api: int,
+                              extra_args: List[str],
+                              install_dir: str) -> Tuple[bool, str]:
+    make_standalone_toolchain_path = os.path.join(
+        ndk_path, 'build/tools/make_standalone_toolchain.py')
 
+    cmd = [make_standalone_toolchain_path, '--force',
+           '--install-dir=' + install_dir, '--arch=' + arch,
+           '--api={}'.format(api)] + extra_args
 
-def make_standalone_toolchain(
-    ndk_path: Path, config: BuildConfiguration, extra_args: list[str], install_dir: Path
-) -> tuple[bool, str]:
-    make_standalone_toolchain_path = (
-        ndk_path / "build/tools/make_standalone_toolchain.py"
-    )
+    if os.name == 'nt':
+        # Windows doesn't process shebang lines, and we wouldn't be pointing at
+        # the right Python if it did. Explicitly invoke the NDK's Python on
+        # Windows.
+        prebuilt_dir = os.path.join(ndk_path, 'prebuilt/windows-x86_64')
+        if not os.path.exists(prebuilt_dir):
+            prebuilt_dir = os.path.join(ndk_path, 'prebuilt/windows')
+        if not os.path.exists(prebuilt_dir):
+            raise RuntimeError('Could not find prebuilts in {}'.format(
+                os.path.join(ndk_path, 'prebuilt')))
 
-    arch = ndk.abis.abi_to_arch(config.abi)
-    cmd = [
-        str(get_python_executable(ndk_path)),
-        str(make_standalone_toolchain_path),
-        "--force",
-        "--install-dir=" + str(install_dir),
-        "--arch=" + arch,
-        "--api={}".format(config.api),
-    ] + extra_args
+        python_path = os.path.join(prebuilt_dir, 'bin/python.exe')
+        cmd = [python_path] + cmd
 
     rc, out = call_output(cmd)
-    return rc == 0, out.decode("utf-8")
+    return rc == 0, out.decode('utf-8')
 
 
-def test_standalone_toolchain(
-    install_dir: Path, test_source: str, flags: list[str]
-) -> tuple[bool, str]:
-    compiler_name = "clang++"
+def test_standalone_toolchain(install_dir: str, test_source: str,
+                              flags: List[str],
+                              linker: LinkerOption) -> Tuple[bool, str]:
+    compiler_name = 'clang++'
 
-    compiler = install_dir / "bin" / compiler_name
-    cmd = [str(compiler), test_source, "-Wl,--no-undefined", "-Wl,--fatal-warnings"]
+    compiler = os.path.join(install_dir, 'bin', compiler_name)
+    cmd = [compiler, test_source, '-Wl,--no-undefined', '-Wl,--fatal-warnings']
+    if linker == LinkerOption.Lld:
+        cmd.append('-fuse-ld=lld')
     cmd += flags
-    if os.name == "nt":
+    if os.name == 'nt':
         # The Windows equivalent of exec doesn't know file associations so it
         # tries to load the batch file as an executable. Invoke it with cmd.
-        cmd = ["cmd", "/c"] + cmd
+        cmd = ['cmd', '/c'] + cmd
     rc, out = call_output(cmd)
-    return rc == 0, out.decode("utf-8")
+    return rc == 0, out.decode('utf-8')
 
 
-def run_test(
-    ndk_path: Path,
-    config: BuildConfiguration,
-    test_source: str,
-    extra_args: list[str],
-    flags: list[str],
-) -> tuple[bool, str]:
-    # On Windows, the default directory for temporary files may have a different
-    # (slow) configuration for security controls, indexing, etc. So we create
-    # temporary directories directly in "out".
-    install_dir = Path(
-        (
-            tempfile.mkdtemp(dir=ndk.paths.get_out_dir())
-            if os.name == "nt"
-            else tempfile.mkdtemp()
-        )
-    )
+def run_test(ndk_path: str, abi: ndk.abis.Abi, api: int, linker: LinkerOption,
+             test_source: str, extra_args: List[str],
+             flags: List[str]) -> Tuple[bool, str]:
+    arch = ndk.abis.abi_to_arch(abi)
+
+    install_dir = tempfile.mkdtemp()
     try:
         success, out = make_standalone_toolchain(
-            ndk_path, config, extra_args, install_dir
-        )
+            ndk_path, arch, api, extra_args, install_dir)
         if not success:
             return success, out
-        return test_standalone_toolchain(install_dir, test_source, flags)
+        return test_standalone_toolchain(install_dir, test_source, flags,
+                                         linker)
     finally:
-        # Try twice to delete the temporary directory, to work around
-        # occasional "file in use" errors on Windows.
-        try:
-            shutil.rmtree(install_dir)
-        except OSError:
-            time.sleep(10)
-            shutil.rmtree(install_dir)
+        shutil.rmtree(install_dir)
diff --git a/ndk/timer.py b/ndk/timer.py
index e9dc9c8..c13ac7a 100644
--- a/ndk/timer.py
+++ b/ndk/timer.py
@@ -32,7 +32,6 @@
     ...         do_something()
     ...     print(f'do_something() took {timer.duration}.')
     """
-
     def __init__(self) -> None:
         self.start_time: Optional[float] = None
         self.end_time: Optional[float] = None
@@ -54,10 +53,7 @@
     def __enter__(self) -> None:
         self.start()
 
-    def __exit__(
-        self,
-        _exc_type: Optional[Type[BaseException]],
-        _exc_value: Optional[BaseException],
-        _traceback: Optional[TracebackType],
-    ) -> None:
+    def __exit__(self, _exc_type: Optional[Type[BaseException]],
+                 _exc_value: Optional[BaseException],
+                 _traceback: Optional[TracebackType]) -> None:
         self.finish()
diff --git a/ndk/toolchains.py b/ndk/toolchains.py
index 3f1f0ba..1e719bf 100644
--- a/ndk/toolchains.py
+++ b/ndk/toolchains.py
@@ -14,92 +14,66 @@
 # limitations under the License.
 #
 """APIs for accessing toolchains."""
-import subprocess
+import enum
 from pathlib import Path
+import subprocess
 from typing import List
 
-import ndk.paths
 from ndk.hosts import Host, get_default_host
+import ndk.paths
 
-CLANG_VERSION = "clang-r522817"
+
+CLANG_VERSION = 'clang-r365631c'
 
 
 HOST_TRIPLE_MAP = {
-    Host.Darwin: "x86_64-apple-darwin",
-    Host.Linux: "x86_64-linux-gnu",
-    Host.Windows64: "x86_64-w64-mingw32",
+    Host.Darwin: 'x86_64-apple-darwin',
+    Host.Linux: 'x86_64-linux-gnu',
+    Host.Windows64: 'x86_64-w64-mingw32',
 }
 
 
+@enum.unique
+class LinkerOption(enum.Enum):
+    Default = 'default'
+    Lld = 'lld'
+
+
 class DarwinSdk:
     """The Darwin SDK."""
-
-    MACOSX_TARGET = "10.9"
+    MACOSX_TARGET = '10.8'
 
     def __init__(self) -> None:
-        self.mac_sdk_path = self._get_sdk_path()
-        self.linker_version = self._get_ld_version()
+        proc_result = subprocess.run(['xcrun', '--show-sdk-path'],
+                                     stdout=subprocess.PIPE,
+                                     check=True, encoding='utf-8')
+        self.mac_sdk_path = Path(proc_result.stdout.strip())
 
-        self.ar = self.sdk_tool("ar")
-        self.asm = self.sdk_tool("as")
-        self.ld = self.sdk_tool("ld")
-        self.nm = self.sdk_tool("nm")
-        self.ranlib = self.sdk_tool("ranlib")
-        self.strings = self.sdk_tool("strings")
-        self.strip = self.sdk_tool("strip")
+        self.ar = self.sdk_tool('ar')
+        self.asm = self.sdk_tool('as')
+        self.ld = self.sdk_tool('ld')
+        self.nm = self.sdk_tool('nm')
+        self.ranlib = self.sdk_tool('ranlib')
+        self.strings = self.sdk_tool('strings')
+        self.strip = self.sdk_tool('strip')
 
     @property
     def flags(self) -> List[str]:
         """The default flags to be used with the SDK."""
         return [
-            f"-mmacosx-version-min={self.MACOSX_TARGET}",
-            f"-DMACOSX_DEPLOYMENT_TARGET={self.MACOSX_TARGET}",
-            f"-isysroot{self.mac_sdk_path}",
-            f"-Wl,-syslibroot,{self.mac_sdk_path}",
-            # https://stackoverflow.com/a/60958449/632035
-            # Our Clang is not built to handle old linkers by default, so if we
-            # do not configure this explicitly it may attempt to use flags that
-            # are not supported by the version of the Darwin linker installed on
-            # the build machine.
-            f"-mlinker-version={self.linker_version}",
+            f'-mmacosx-version-min={self.MACOSX_TARGET}',
+            f'-DMACOSX_DEPLOYMENT_TARGET={self.MACOSX_TARGET}',
+            f'-isysroot{self.mac_sdk_path}',
+            f'-Wl,-syslibroot,{self.mac_sdk_path}',
         ]
 
-    @staticmethod
-    def sdk_tool(name: str) -> Path:
+    def sdk_tool(self, name: str) -> Path:
         """Returns the path to the given SDK tool."""
-        proc_result = subprocess.run(
-            ["xcrun", "--find", name],
-            stdout=subprocess.PIPE,
-            check=True,
-            encoding="utf-8",
-        )
+        proc_result = subprocess.run(['xcrun', '--find', name],
+                                     stdout=subprocess.PIPE,
+                                     check=True, encoding='utf-8')
         return Path(proc_result.stdout.strip())
 
-    @staticmethod
-    def _get_sdk_path() -> Path:
-        """Gets the path to the Mac SDK."""
-        proc_result = subprocess.run(
-            ["xcrun", "--show-sdk-path"],
-            stdout=subprocess.PIPE,
-            check=True,
-            encoding="utf-8",
-        )
-        return Path(proc_result.stdout.strip())
-
-    @staticmethod
-    def _get_ld_version() -> str:
-        """Gets the version of the system linker."""
-        proc_result = subprocess.run(
-            ["ld", "-v"],
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            check=True,
-            encoding="utf-8",
-        )
-        output = proc_result.stderr.strip().splitlines()[0]
-        # Example first line: @(#)PROGRAM:ld  PROJECT:ld64-409.12
-        return output.rsplit("-", 1)[-1]
-
 
 class Toolchain:
     """A compiler toolchain.
@@ -183,16 +157,55 @@
         raise NotImplementedError
 
 
-class Sysroot:
-    """A sysroot for the target platform."""
+class GccToolchain(Toolchain):
+    """A GCC compiler toolchain."""
 
-    def __init__(self, target: Host) -> None:
-        self.target = target
+    def gcc_tool(self, tool_name: str) -> Path:
+        """Returns the path to the GCC tool targeting the given host."""
+        return self.path / 'bin' / f'{self.triple}-{tool_name}'
+
+    @property
+    def ar(self) -> Path:
+        """The path to the archiver."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.ar
+        return self.gcc_tool('ar')
+
+    @property
+    def asm(self) -> Path:
+        """The path to the assembler."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.asm
+        return self.gcc_tool('as')
 
     @property
     def bin_paths(self) -> List[Path]:
         """The path to the toolchain binary directories for use with PATH."""
-        return [self.path / "bin"]
+        return [self.path / 'bin']
+
+    @property
+    def cc(self) -> Path:
+        """The path to the C compiler."""
+        return self.gcc_tool('gcc')
+
+    @property
+    def cxx(self) -> Path:
+        """The path to the C++ compiler."""
+        return self.gcc_tool('g++')
+
+    @property
+    def flags(self) -> List[str]:
+        """The default flags to be used with the compiler."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.flags
+        return []
+
+    @property
+    def ld(self) -> Path:
+        """The path to the linker."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.ld
+        return self.gcc_tool('ld')
 
     @property
     def lib_dirs(self) -> List[Path]:
@@ -201,50 +214,77 @@
         The GCC library directory contains libgcc and other compiler runtime
         libraries. These may be split across multiple directories.
         """
-        lib_dirs = [
-            self.path
-            / {
-                Host.Darwin: "lib/gcc/i686-apple-darwin11/4.2.1",
-                Host.Linux: "lib/gcc/x86_64-linux/4.8.3",
-                Host.Windows64: "lib/gcc/x86_64-w64-mingw32/4.8.3",
-            }[self.target]
-        ]
+        lib_dirs = [self.path / {
+            Host.Darwin: 'lib/gcc/i686-apple-darwin11/4.2.1',
+            Host.Linux: 'lib/gcc/x86_64-linux/4.8.3',
+            Host.Windows64: 'lib/gcc/x86_64-w64-mingw32/4.8.3',
+        }[self.target]]
         if self.target != Host.Darwin:
-            lib_dirs.append(self.path / self.triple / "lib64")
+            lib_dirs.append(self.path / self.triple / 'lib64')
         return lib_dirs
 
     @property
+    def nm(self) -> Path:
+        """The path to nm."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.nm
+        return self.gcc_tool('nm')
+
+    @property
     def path(self) -> Path:
         """Returns the path to the top level toolchain directory."""
         if self.target == Host.Darwin:
-            return (
-                ndk.paths.ANDROID_DIR
-                / "prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1"
-            )
-        if self.target == Host.Linux:
-            return (
-                ndk.paths.ANDROID_DIR
-                / "prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.17-4.8"
-            )
-        return (
-            ndk.paths.ANDROID_DIR
-            / "prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8"
-        )
+            return (ndk.paths.ANDROID_DIR /
+                    'prebuilts/gcc/darwin-x86/host/i686-apple-darwin-4.2.1')
+        elif self.target == Host.Linux:
+            return (ndk.paths.ANDROID_DIR /
+                    'prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.17-4.8')
+        else:
+            return (ndk.paths.ANDROID_DIR /
+                    'prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8')
+
+    @property
+    def ranlib(self) -> Path:
+        """The path to ranlib."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.ranlib
+        return self.gcc_tool('ranlib')
+
+    @property
+    def rescomp(self) -> Path:
+        """The path to the resource compiler."""
+        if not self.target.is_windows:
+            raise NotImplementedError
+        return self.gcc_tool('windres')
+
+    @property
+    def strip(self) -> Path:
+        """The path to strip."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.strip
+        return self.gcc_tool('strip')
+
+    @property
+    def strings(self) -> Path:
+        """The path to strings."""
+        if self.target == Host.Darwin:
+            return self.darwin_sdk.strings
+        return self.gcc_tool('strings')
 
     @property
     def sysroot(self) -> Path:
         """The path to the GCC sysroot."""
         if self.target == Host.Linux:
-            return self.path / "sysroot"
+            return self.path / 'sysroot'
         return self.path / self.triple
 
     @property
     def triple(self) -> str:
         """Returns the GCC triple for the host toolchain."""
         return {
-            Host.Darwin: "x86_64-apple-darwin11",
-            Host.Linux: "x86_64-linux",
-            Host.Windows64: "x86_64-w64-mingw32",
+            Host.Darwin: 'x86_64-apple-darwin11',
+            Host.Linux: 'x86_64-linux',
+            Host.Windows64: 'x86_64-w64-mingw32',
         }[self.target]
 
 
@@ -253,17 +293,18 @@
 
     def __init__(self, target: Host, host: Host = get_default_host()) -> None:
         super().__init__(target, host=host)
-        self.sysroot = Sysroot(target)
+        self.gcc_toolchain = GccToolchain(target, host=host)
 
     @staticmethod
     def path_for_host(host: Host) -> Path:
         """Returns the path to the Clang directory for the given host."""
         host_tag = {
-            Host.Darwin: "darwin-x86",
-            Host.Linux: "linux-x86",
-            Host.Windows64: "windows-x86",
+            Host.Darwin: 'darwin-x86',
+            Host.Linux: 'linux-x86',
+            Host.Windows64: 'windows-x86',
         }[host]
-        return ndk.paths.ANDROID_DIR / "prebuilts/clang/host" / host_tag / CLANG_VERSION
+        return (ndk.paths.ANDROID_DIR / 'prebuilts/clang/host' / host_tag /
+                CLANG_VERSION)
 
     @property
     def path(self) -> Path:
@@ -272,111 +313,95 @@
 
     def clang_tool(self, tool_name: str) -> Path:
         """Returns the path to the Clang tool for the build host."""
-        return self.path / "bin" / tool_name
+        return self.path / 'bin' / tool_name
 
     @property
     def ar(self) -> Path:
         """The path to the archiver."""
-        if self.target == Host.Darwin:
-            return self.darwin_sdk.ar
-        return self.clang_tool("llvm-ar")
+        return self.gcc_toolchain.ar
 
     @property
     def asm(self) -> Path:
         """The path to the assembler."""
-        if self.target == Host.Darwin:
-            return self.darwin_sdk.asm
-        return self.cc
+        return self.gcc_toolchain.asm
 
     @property
     def bin_paths(self) -> List[Path]:
         """The path to the toolchain binary directories for use with PATH."""
-        return [self.path / "bin"]
+        return self.gcc_toolchain.bin_paths + [self.path / 'bin']
 
     @property
     def cc(self) -> Path:
-        return self.clang_tool("clang")
+        return self.clang_tool('clang')
 
     @property
     def cxx(self) -> Path:
-        return self.clang_tool("clang++")
+        return self.clang_tool('clang++')
 
     @property
     def lib_dirs(self) -> List[Path]:
-        lib_dirs = self.sysroot.lib_dirs
+        lib_dirs = self.gcc_toolchain.lib_dirs
         # libc++ library path. Static only for Windows.
         if self.target.is_windows:
-            lib_dirs.append(self.path_for_host(self.target) / "lib64")
+            lib_dirs.append(self.path_for_host(self.target) / 'lib64')
         else:
-            lib_dirs.append(self.path / "lib64")
+            lib_dirs.append(self.path / 'lib64')
         return lib_dirs
 
     @property
     def flags(self) -> List[str]:
         host_triple = HOST_TRIPLE_MAP[self.target]
+        toolchain_bin = (
+            self.gcc_toolchain.path / self.gcc_toolchain.triple / 'bin')
         flags = [
-            f"--target={host_triple}",
+            f'--target={host_triple}',
+            f'-B{toolchain_bin}',
         ]
 
         if self.target.is_windows:
-            flags.append("-I" + str(self.path_for_host(self.target) / "include/c++/v1"))
+            flags.append('-I' + str(self.path_for_host(self.target) / 'include/c++/v1'))
 
         if self.target == Host.Darwin:
             flags.extend(self.darwin_sdk.flags)
-            flags.append(f"-L{self.path}/lib")
         else:
-            flags.append(f"--sysroot={self.sysroot.sysroot}")
+            flags.append(f'--sysroot={self.gcc_toolchain.sysroot}')
 
             for lib_dir in self.lib_dirs:
                 # Both -L and -B because Clang only searches for CRT
                 # objects in -B directories.
-                flags.extend(
-                    [
-                        f"-L{lib_dir}",
-                        f"-B{lib_dir}",
-                    ]
-                )
+                flags.extend([
+                    f'-L{lib_dir}',
+                    f'-B{lib_dir}',
+                ])
 
         return flags
 
     @property
     def ld(self) -> Path:
         """The path to the linker."""
-        if self.target == Host.Darwin:
-            return self.darwin_sdk.ld
-        return self.clang_tool("ld.lld")
+        return self.gcc_toolchain.ld
 
     @property
     def nm(self) -> Path:
         """The path to nm."""
-        if self.target == Host.Darwin:
-            return self.darwin_sdk.nm
-        return self.clang_tool("llvm-nm")
+        return self.gcc_toolchain.nm
 
     @property
     def ranlib(self) -> Path:
         """The path to ranlib."""
-        if self.target == Host.Darwin:
-            return self.darwin_sdk.ranlib
-        return self.clang_tool("llvm-ranlib")
+        return self.gcc_toolchain.ranlib
 
     @property
     def rescomp(self) -> Path:
         """The path to the resource compiler."""
-        if not self.target.is_windows:
-            raise NotImplementedError
-        return self.clang_tool("llvm-windres")
+        return self.gcc_toolchain.rescomp
 
     @property
     def strip(self) -> Path:
         """The path to strip."""
-        if self.target == Host.Darwin:
-            return self.darwin_sdk.strip
-        return self.clang_tool("llvm-strip")
+        return self.gcc_toolchain.strip
 
     @property
     def strings(self) -> Path:
         """The path to strings."""
-        if self.target == Host.Darwin:
-            return self.darwin_sdk.strings
-        return self.clang_tool("llvm-strings")
+        return self.gcc_toolchain.strings
diff --git a/ndk/tools/ndkgitprebuilts.py b/ndk/tools/ndkgitprebuilts.py
deleted file mode 100644
index e7ab4e2..0000000
--- a/ndk/tools/ndkgitprebuilts.py
+++ /dev/null
@@ -1,401 +0,0 @@
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Script for updating the prebuilt NDK installed to a git repo.
-
-Run with `poetry run update-prebuilt-ndk`.
-"""
-from __future__ import annotations
-
-import asyncio
-import logging
-import re
-import shlex
-import shutil
-import sys
-import textwrap
-from abc import ABC, abstractmethod
-from contextlib import nullcontext
-from pathlib import Path
-from tempfile import TemporaryDirectory, mkdtemp
-from typing import ContextManager
-
-import click
-from aiohttp import ClientSession
-from fetchartifact import fetch_artifact_chunked
-
-
-def is_filesystem_case_sensitive(path: Path) -> bool:
-    """Returns True if the file system the given path belongs to is case-sensitive."""
-    if not path.exists():
-        path.mkdir(parents=True)
-    elif not path.is_dir():
-        raise ValueError(f"{path} is not a directory")
-
-    temp_dir = Path(mkdtemp(prefix=f"{path}/"))
-    try:
-        (temp_dir / "a").touch()
-        return not (temp_dir / "A").exists()
-    finally:
-        shutil.rmtree(temp_dir)
-
-
-async def run(cmd: list[str], cwd: Path | None = None) -> None:
-    """Runs and logs an asyncio subprocess."""
-    logging.debug("exec CWD=%s %s", cwd or Path.cwd(), shlex.join(cmd))
-    proc = await asyncio.create_subprocess_exec(cmd[0], *cmd[1:], cwd=cwd)
-    await proc.communicate()
-    if proc.returncode != 0:
-        raise RuntimeError(f"Command failed: CWD={cwd or Path.cwd()} {shlex.join(cmd)}")
-
-
-async def run_piped(cmd: list[str], cwd: Path | None = None) -> bytes:
-    """Runs and logs an asyncio subprocess.
-
-    stdout and stderr will be combined and returned as bytes.
-    """
-    logging.debug("exec CWD=%s %s", cwd or Path.cwd(), shlex.join(cmd))
-    proc = await asyncio.create_subprocess_exec(
-        cmd[0],
-        *cmd,
-        cwd=cwd,
-        stdout=asyncio.subprocess.PIPE,
-        stderr=asyncio.subprocess.STDOUT,
-    )
-    stdout, _ = await proc.communicate()
-    return stdout
-
-
-async def run_shell(cmd: str, cwd: Path | None = None) -> None:
-    """Runs and logs an asyncio subprocess."""
-    logging.debug("shell CWD=%s %s", cwd or Path.cwd(), cmd)
-    proc = await asyncio.create_subprocess_shell(cmd, cwd=cwd)
-    await proc.communicate()
-    if proc.returncode != 0:
-        raise RuntimeError(f"Command failed: CWD={cwd or Path.cwd()} {cmd}")
-
-
-class NdkSource(ABC):
-    @abstractmethod
-    def commit_summary(self) -> str: ...
-
-    @abstractmethod
-    async def download_zip(self, destination: Path) -> None: ...
-
-    @abstractmethod
-    def infer_major_version(self) -> int | None:
-        """Infers the major version from the source, if possible."""
-
-    @staticmethod
-    def from_str(ndk_source: str) -> NdkSource:
-        if ndk_source.startswith("r"):
-            return ReleasedNdk(ndk_source)
-        if (path := Path(ndk_source)).exists():
-            return ZippedNdk(path)
-        return CanaryNdk(ndk_source)
-
-
-class ReleasedNdk(NdkSource):
-    def __init__(self, version: str) -> None:
-        super().__init__()
-        self.version = version
-
-    def commit_summary(self) -> str:
-        return f"Update to NDK {self.version}."
-
-    def infer_major_version(self) -> int | None:
-        pattern = r"r(\d+).*"
-        if (match := re.search(pattern, self.version)) is not None:
-            return int(match.group(1))
-        raise ValueError(
-            f"NDK version {self.version} did not match expected pattern {pattern}"
-        )
-
-    @property
-    def url(self) -> str:
-        return f"https://dl.google.com/android/repository/android-ndk-{self.version}-linux.zip"
-
-    async def download_zip(self, destination: Path) -> None:
-        logging.info("Downloading NDK from %s", self.url)
-        async with ClientSession() as session:
-            async with session.get(self.url) as response:
-                with destination.open("wb") as output:
-                    async for chunk in response.content.iter_chunked(4 * 1024 * 1024):
-                        output.write(chunk)
-
-
-class CanaryNdk(NdkSource):
-    def __init__(self, build_id: str) -> None:
-        super().__init__()
-        self.build_id = build_id
-
-    def commit_summary(self) -> str:
-        return f"Update to canary build {self.build_id}."
-
-    def infer_major_version(self) -> int | None:
-        return None
-
-    async def download_zip(self, destination: Path) -> None:
-        async with ClientSession() as session:
-            with destination.open("wb") as output:
-                async for chunk in fetch_artifact_chunked(
-                    "linux",
-                    self.build_id,
-                    f"android-ndk-{self.build_id}-linux-x86_64.zip",
-                    session,
-                ):
-                    output.write(chunk)
-
-
-class ZippedNdk(NdkSource):
-    def __init__(self, path: Path) -> None:
-        super().__init__()
-        self.path = path
-
-    def commit_summary(self) -> str:
-        return f"(DO NOT SUBMIT) Update with local NDK."
-
-    def infer_major_version(self) -> int | None:
-        return None
-
-    async def download_zip(self, destination: Path) -> None:
-        shutil.copy(self.path, destination)
-
-
-class PrebuiltsRepo:
-    def __init__(
-        self, path: Path, ndk_major_version: int | None, ndk_source: NdkSource
-    ) -> None:
-        self.path = path
-        self.ndk_major_version = ndk_major_version
-        self.ndk_source = ndk_source
-
-    async def prepare_for_install(self, force: bool) -> None:
-        await self.ensure_latest_master(force)
-        await self.remove_contents()
-
-    async def ensure_latest_master(self, force: bool) -> None:
-        """Clones or updates the NDK prebuilt repo in self.git_repo_path."""
-        if (self.path / ".git").exists():
-            await self.update_git_repo(force)
-        else:
-            await self.clone_git_repo()
-
-    async def update_git_repo(self, force: bool) -> None:
-        """Updates the NDK prebuilt repo in self.path."""
-        if not force:
-            await self.check_if_repo_clean()
-        await self.checkout_master(force)
-        if force:
-            await self._git(["clean", "-df"])
-        await self._git(["pull"])
-
-    async def check_if_repo_clean(self) -> None:
-        """Raises if the repository has uncommitted changes."""
-        output = (await self._git_piped(["status", "--porcelain"])).decode("utf-8")
-        if output:
-            raise RuntimeError(
-                f"Cannot update {self.path} because there are uncommitted changes or"
-                f"untracked files:\n{output}"
-            )
-
-    async def checkout_master(self, force: bool) -> None:
-        """Switches to the master branch."""
-        args = ["checkout"]
-        if force:
-            args.append("-f")
-        args.append("master")
-        await self._git(args)
-
-    async def clone_git_repo(self) -> None:
-        """Clones the NDK prebuilt repo in self.git_repo_path."""
-        assert self.ndk_major_version is not None
-        repo_base = "https://android.googlesource.com/toolchain/prebuilts/ndk"
-        await run(
-            [
-                "git",
-                "clone",
-                f"{repo_base}/r{self.ndk_major_version}",
-                str(self.path),
-            ]
-        )
-
-    async def remove_contents(self) -> None:
-        await self._git(["rm", "-rf", "."])
-
-    async def _git(self, cmd: list[str]) -> None:
-        await run(["git", "-C", str(self.path)] + cmd)
-
-    async def _git_piped(self, cmd: list[str]) -> bytes:
-        return await run_piped(["git", "-C", str(self.path)] + cmd)
-
-    async def install_from(self, ndk_zip: Path) -> None:
-        await self.unzip_to_repo(ndk_zip)
-        self.fixup_install()
-        await self.create_commit()
-
-    async def unzip_to_repo(self, ndk_zip: Path) -> None:
-        assert ndk_zip.exists()
-        # Not using TemporaryDirectory because we want to make sure it's on the same
-        # filesystem as the repo so we can mv rather than cp.
-        temp_dir = self.path / ".extract"
-        if temp_dir.exists():
-            shutil.rmtree(temp_dir)
-        temp_dir.mkdir()
-        try:
-            await run(["unzip", "-d", str(temp_dir), str(ndk_zip)])
-            # We should have extracted a single directory.
-            subdirs = list(temp_dir.iterdir())
-            assert len(subdirs) == 1
-            ndk_dir = subdirs[0]
-            for item in ndk_dir.iterdir():
-                item.rename(self.path / item.name)
-        finally:
-            shutil.rmtree(temp_dir)
-
-    def fixup_install(self) -> None:
-        (self.path / "Android.mk").write_text(
-            textwrap.dedent(
-                """\
-                # Intentionally empty to prevent loading subdir Android.mk files.
-                # The distributed NDK includes a handful of Android.mk files for use
-                # with ndk-build via import-module, but without an empty Android.mk at
-                # the top level, the platform build system will try to use them.
-                """
-            )
-        )
-
-    async def create_commit(self) -> None:
-        await self.install_commit_hook()
-        await self._git(["add", "-A"])
-        message = textwrap.dedent(
-            f"""\
-            {self.ndk_source.commit_summary()}
-
-            Test: treehugger
-            Bug: None
-            """
-        )
-        await self._git(["commit", "-a", "-m", message])
-
-    async def install_commit_hook(self) -> None:
-        commit_hook_url = (
-            "https://gerrit-review.googlesource.com/tools/hooks/commit-msg"
-        )
-        await run_shell(
-            "f=`git rev-parse --git-dir`/hooks/commit-msg ; mkdir -p $(dirname $f) ; "
-            f"curl -Lo $f {commit_hook_url} ; chmod +x $f",
-            cwd=self.path,
-        )
-
-    async def upload(self) -> None:
-        await self._git(["push", "origin", "HEAD:refs/for/master"])
-
-
-class App:
-    def __init__(
-        self,
-        ndk_source: NdkSource,
-        ndk_major_version: int | None,
-        working_directory: Path,
-        force_reset_git_repo: bool,
-    ) -> None:
-        self.prebuilts_repo = PrebuiltsRepo(
-            working_directory / "git_repo", ndk_major_version, ndk_source
-        )
-        self.ndk_source = ndk_source
-        self.working_directory = working_directory
-        self.force_reset_git_repo = force_reset_git_repo
-
-    async def run(self) -> None:
-        logging.debug("Updating prebuilt NDK at %s", self.prebuilts_repo.path)
-        dest = self.working_directory / "ndk.zip"
-        await asyncio.gather(
-            self.ndk_source.download_zip(dest),
-            self.prebuilts_repo.prepare_for_install(self.force_reset_git_repo),
-        )
-        await self.prebuilts_repo.install_from(dest)
-        await self.prebuilts_repo.upload()
-
-    @staticmethod
-    @click.command()
-    @click.option(
-        "-v",
-        "--verbose",
-        count=True,
-        default=0,
-        help="Increase verbosity (repeatable).",
-    )
-    @click.option(
-        "--working-directory",
-        type=click.Path(file_okay=False, resolve_path=True, path_type=Path),
-        help=(
-            "Use the given directory as the working directory rather than a temporary "
-            "directory. Will not be cleaned up on program exit."
-        ),
-    )
-    @click.option(
-        "--ndk-major-version",
-        type=int,
-        help=(
-            "Major version of the NDK prebuilts. If --git-repo is not used, this will "
-            "determine which version of the prebuilts to clone."
-        ),
-    )
-    @click.option(
-        "-f", "--force", is_flag=True, help="Forcibly resets the state of --git-repo."
-    )
-    @click.argument("ndk_source")
-    def main(
-        working_directory: Path | None,
-        verbose: int,
-        ndk_source: str,
-        ndk_major_version: int | None,
-        force: bool,
-    ) -> None:
-        """Updates the NDK checked in to toolchain/prebuilts/ndk/$VERSION.
-
-        NDK_SOURCE is the version of the NDK to install to prebuilts. This can be
-        either an NDK version name such as r25c, which will download that release from
-        dl.google.com; a build ID, which will download that canary build from
-        ci.android.com; or a path to a local file, which will be used as-is. A local
-        file should not be used except for testing. Only release or CI artifacts should
-        ever be checked in.
-        """
-        log_levels = [logging.WARNING, logging.INFO, logging.DEBUG]
-        logging.basicConfig(level=log_levels[min(verbose, len(log_levels) - 1)])
-        ndk = NdkSource.from_str(ndk_source)
-        if ndk_major_version is None:
-            ndk_major_version = ndk.infer_major_version()
-        if ndk_major_version is None:
-            sys.exit(
-                "Could not determine NDK major version from NDK_SOURCE "
-                "({ndk_source}) and neither --git-repo nor --ndk-major-version was "
-                "used."
-            )
-
-        if working_directory is None:
-            working_directory_ctx: ContextManager[Path | str] = TemporaryDirectory()
-        else:
-            working_directory_ctx = nullcontext(working_directory)
-        with working_directory_ctx as temp_dir_str:
-            temp_dir = Path(temp_dir_str)
-            if not is_filesystem_case_sensitive(temp_dir):
-                sys.exit(
-                    f"Working directory {temp_dir} is not case-sensitive. If your "
-                    "system's temp directory is not case-sensitive, you must use "
-                    "--working-directory."
-                )
-            asyncio.run(App(ndk, ndk_major_version, temp_dir, force).run())
diff --git a/ndk/ui.py b/ndk/ui.py
index d46c1c8..291f528 100644
--- a/ndk/ui.py
+++ b/ndk/ui.py
@@ -14,13 +14,15 @@
 # limitations under the License.
 #
 """UI classes for build output."""
-from __future__ import absolute_import, division, print_function
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import division
 
 import math
 import os
 import sys
 import time
-from typing import Callable, Iterable, List, Optional, Tuple, cast
+from typing import Iterable, List, Optional, Tuple, cast
 
 import ndk.ansi
 from ndk.workqueue import AnyWorkQueue
@@ -47,7 +49,8 @@
     # Number of seconds to delay between each draw command when debugging.
     debug_draw_delay = 0.1
 
-    def __init__(self, console: ndk.ansi.Console, debug_draw: bool = False) -> None:
+    def __init__(self, console: ndk.ansi.Console,
+                 debug_draw: bool = False) -> None:
         super().__init__(console)
         self.last_rendered_lines: List[str] = []
         self.debug_draw = debug_draw
@@ -76,14 +79,14 @@
         """
         if self.debug_draw:
             for cmd in commands:
-                self.console.print(cmd, end="")
+                self.console.print(cmd, end='')
                 time.sleep(self.debug_draw_delay)
         else:
-            self.console.print("".join(commands), end="")
+            self.console.print(''.join(commands), end='')
 
     def render(self, lines: List[str]) -> None:
         if not self.last_rendered_lines:
-            self.console.print(os.linesep.join(lines), end="")
+            self.console.print(os.linesep.join(lines), end='')
         elif len(lines) != len(self.last_rendered_lines):
             self.clear_last_render()
             self.render(lines)
@@ -106,10 +109,11 @@
         self.last_rendered_lines = lines
 
 
-class NonAnsiUiRenderer(UiRenderer):
-    """Renders a UI to a non-ANSI console."""
+class DumbUiRenderer(UiRenderer):
+    """Renders a UI to a dumb console."""
 
-    def __init__(self, console: ndk.ansi.Console, redraw_rate: int = 30) -> None:
+    def __init__(self, console: ndk.ansi.Console,
+                 redraw_rate: int = 30) -> None:
         super().__init__(console)
         self.redraw_rate = redraw_rate
         self.last_draw: Optional[float] = None
@@ -159,7 +163,8 @@
 class BuildProgressUi(Ui):
     """A UI for displaying build status."""
 
-    def __init__(self, ui_renderer: UiRenderer, workqueue: AnyWorkQueue) -> None:
+    def __init__(self, ui_renderer: UiRenderer,
+                 workqueue: AnyWorkQueue) -> None:
         super().__init__(ui_renderer)
         self.workqueue = workqueue
 
@@ -172,18 +177,20 @@
         return lines
 
 
-def get_build_progress_ui(console: ndk.ansi.Console, workqueue: AnyWorkQueue) -> Ui:
+def get_build_progress_ui(console: ndk.ansi.Console,
+                          workqueue: AnyWorkQueue) -> Ui:
     """Returns the appropriate build console UI for the given console."""
     ui_renderer: UiRenderer
     if console.smart_console:
         ui_renderer = AnsiUiRenderer(console)
         return BuildProgressUi(ui_renderer, workqueue)
-    ui_renderer = NonAnsiUiRenderer(console)
-    return NonAnsiBuildProgressUi(ui_renderer)
+    else:
+        ui_renderer = DumbUiRenderer(console)
+        return DumbBuildProgressUi(ui_renderer)
 
 
-class NonAnsiBuildProgressUi(Ui):
-    """A UI for displaying build status to non-ANSI consoles."""
+class DumbBuildProgressUi(Ui):
+    """A UI for displaying build status to dumb consoles."""
 
     def get_ui_lines(self) -> List[str]:
         return []
@@ -198,21 +205,22 @@
         pass
 
 
-def get_work_queue_ui(console: ndk.ansi.Console, workqueue: AnyWorkQueue) -> Ui:
+def get_work_queue_ui(console: ndk.ansi.Console,
+                      workqueue: AnyWorkQueue) -> Ui:
     """Returns the appropriate work queue console UI for the given console."""
     ui_renderer: UiRenderer
     if console.smart_console:
         ui_renderer = AnsiUiRenderer(console)
         show_worker_status = True
     else:
-        ui_renderer = NonAnsiUiRenderer(console)
+        ui_renderer = DumbUiRenderer(console)
         show_worker_status = False
     return WorkQueueUi(ui_renderer, show_worker_status, workqueue)
 
 
 def columnate(lines: List[str], max_width: int, max_height: int) -> List[str]:
     """Distributes lines of text into height limited columns."""
-    if os.name == "nt":
+    if os.name == 'nt':
         # Not yet implemented.
         return lines
 
@@ -225,7 +233,7 @@
     rows = [lines[r::num_rows] for r in range(num_rows)]
 
     column_width = max_width // num_columns
-    return ["".join(s.ljust(column_width) for s in row) for row in rows]
+    return [''.join(s.ljust(column_width) for s in row) for row in rows]
 
 
 class WorkQueueUi(Ui):
@@ -233,9 +241,8 @@
 
     NUM_TESTS_DIGITS = 6
 
-    def __init__(
-        self, ui_renderer: UiRenderer, show_worker_status: bool, workqueue: AnyWorkQueue
-    ) -> None:
+    def __init__(self, ui_renderer: UiRenderer, show_worker_status: bool,
+                 workqueue: AnyWorkQueue) -> None:
         super().__init__(ui_renderer)
         self.show_worker_status = show_worker_status
         self.workqueue = workqueue
@@ -254,25 +261,6 @@
             if ui_height > 0:
                 lines = columnate(lines, ansi_console.width, ui_height)
 
-        lines.append(
-            "{: >{width}} jobs remaining".format(
-                self.workqueue.num_tasks, width=self.NUM_TESTS_DIGITS
-            )
-        )
+        lines.append('{: >{width}} jobs remaining'.format(
+            self.workqueue.num_tasks, width=self.NUM_TESTS_DIGITS))
         return lines
-
-
-def finish_workqueue_with_ui(
-    workqueue: ndk.workqueue.WorkQueue,
-    ui_fn: Callable[[ndk.ansi.Console, ndk.workqueue.WorkQueue], Ui],
-) -> None:
-    console = ndk.ansi.get_console()
-    ui = ui_fn(console, workqueue)
-    with ndk.ansi.disable_terminal_echo(sys.stdin):
-        with console.cursor_hide_context():
-            ui.draw()
-            while not workqueue.finished():
-                ui.draw()
-                workqueue.get_result()
-                ui.draw()
-            ui.clear()
diff --git a/ndk/win32.py b/ndk/win32.py
index d1c4e86..e3318c7 100644
--- a/ndk/win32.py
+++ b/ndk/win32.py
@@ -15,10 +15,11 @@
 #
 """Python interfaces for win32 APIs."""
 from __future__ import absolute_import
+from typing import Optional
 
 import ctypes
 import ctypes.wintypes
-from typing import Optional
+
 
 # From winnt.h
 JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE = 0x00002000
@@ -27,59 +28,57 @@
 
 class IO_COUNTERS(ctypes.Structure):
     _fields_ = [
-        ("ReadOperationCount", ctypes.c_ulonglong),
-        ("WriteOperationCount", ctypes.c_ulonglong),
-        ("OtherOperationCount", ctypes.c_ulonglong),
-        ("ReadTransferCount", ctypes.c_ulonglong),
-        ("WriteTransferCount", ctypes.c_ulonglong),
-        ("OtherTransferCount", ctypes.c_ulonglong),
+        ('ReadOperationCount', ctypes.c_ulonglong),
+        ('WriteOperationCount', ctypes.c_ulonglong),
+        ('OtherOperationCount', ctypes.c_ulonglong),
+        ('ReadTransferCount', ctypes.c_ulonglong),
+        ('WriteTransferCount', ctypes.c_ulonglong),
+        ('OtherTransferCount', ctypes.c_ulonglong),
     ]
 
 
 class JOBOBJECT_BASIC_LIMIT_INFORMATION(ctypes.Structure):
     _fields_ = [
-        ("PerProcessUserTimeLimit", ctypes.wintypes.LARGE_INTEGER),
-        ("PerJobUserTimeLimit", ctypes.wintypes.LARGE_INTEGER),
-        ("LimitFlags", ctypes.wintypes.DWORD),
-        ("MinimumWorkingSetSize", ctypes.c_size_t),
-        ("MaximumWorkingSetSize", ctypes.c_size_t),
-        ("ActiveProcessLimit", ctypes.wintypes.DWORD),
-        ("Affinity", ctypes.POINTER(ctypes.c_ulong)),
-        ("PriorityClass", ctypes.wintypes.DWORD),
-        ("SchedulingClass", ctypes.wintypes.DWORD),
+        ('PerProcessUserTimeLimit', ctypes.wintypes.LARGE_INTEGER),
+        ('PerJobUserTimeLimit', ctypes.wintypes.LARGE_INTEGER),
+        ('LimitFlags', ctypes.wintypes.DWORD),
+        ('MinimumWorkingSetSize', ctypes.c_size_t),
+        ('MaximumWorkingSetSize', ctypes.c_size_t),
+        ('ActiveProcessLimit', ctypes.wintypes.DWORD),
+        ('Affinity', ctypes.POINTER(ctypes.c_ulong)),
+        ('PriorityClass', ctypes.wintypes.DWORD),
+        ('SchedulingClass', ctypes.wintypes.DWORD),
     ]
 
-
 class JOBOBJECT_EXTENDED_LIMIT_INFORMATION(ctypes.Structure):
     _fields_ = [
-        ("BasicLimitInformation", JOBOBJECT_BASIC_LIMIT_INFORMATION),
-        ("IoInfo", IO_COUNTERS),
-        ("ProcessMemoryLimit", ctypes.c_size_t),
-        ("JobMemoryLimit", ctypes.c_size_t),
-        ("PeakProcessMemoryUsed", ctypes.c_size_t),
-        ("PeakJobMemoryUsed", ctypes.c_size_t),
+        ('BasicLimitInformation', JOBOBJECT_BASIC_LIMIT_INFORMATION),
+        ('IoInfo', IO_COUNTERS),
+        ('ProcessMemoryLimit', ctypes.c_size_t),
+        ('JobMemoryLimit', ctypes.c_size_t),
+        ('PeakProcessMemoryUsed', ctypes.c_size_t),
+        ('PeakJobMemoryUsed', ctypes.c_size_t),
     ]
 
 
 # mypy needs to ignore this line because this only typechecks successfully for
 # Windows.
 class UseLastErrorWinDLL(ctypes.WinDLL):  # type: ignore
-    def __init__(
-        self, name: str, mode: int = ctypes.DEFAULT_MODE, handle: Optional[int] = None
-    ) -> None:
+    def __init__(self,
+                 name: str,
+                 mode: int = ctypes.DEFAULT_MODE,
+                 handle: int = None) -> None:
         super().__init__(name, mode, handle, use_last_error=True)
 
-
 _LOADER = ctypes.LibraryLoader(UseLastErrorWinDLL)
 
 
-def CreateJobObject(
-    attributes: Optional[ctypes.Structure] = None, name: Optional[str] = None
-) -> ctypes.wintypes.HANDLE:
+def CreateJobObject(attributes: Optional[ctypes.Structure] = None,
+                    name: str = None) -> ctypes.wintypes.HANDLE:
     fn_CreateJobObjectW = _LOADER.kernel32.CreateJobObjectW
     fn_CreateJobObjectW.restype = ctypes.wintypes.HANDLE
     fn_CreateJobObjectW.argtypes = [ctypes.c_void_p, ctypes.c_wchar_p]
-    job: Optional[ctypes.wintypes.HANDLE] = fn_CreateJobObjectW(attributes, name)
+    job = fn_CreateJobObjectW(attributes, name)
     if job is None:
         # Automatically calls GetLastError and FormatError for us to create the
         # WindowsError exception.
@@ -87,33 +86,27 @@
     return job
 
 
-def SetInformationJobObject(
-    job: ctypes.wintypes.HANDLE, info_class: int, info: ctypes.Structure
-) -> None:
+def SetInformationJobObject(job: ctypes.wintypes.HANDLE, info_class: int,
+                            info: ctypes.Structure) -> None:
     fn_SetInformationJobObject = _LOADER.kernel32.SetInformationJobObject
     fn_SetInformationJobObject.restype = ctypes.wintypes.BOOL
     fn_SetInformationJobObject.argtypes = [
         ctypes.wintypes.HANDLE,
         ctypes.c_int,
         ctypes.c_void_p,
-        ctypes.wintypes.DWORD,
+        ctypes.wintypes.DWORD
     ]
-    result = fn_SetInformationJobObject(
-        job, info_class, ctypes.pointer(info), ctypes.sizeof(info)
-    )
+    result = fn_SetInformationJobObject(job, info_class, ctypes.pointer(info),
+                                        ctypes.sizeof(info))
     if not result:
         raise ctypes.WinError(ctypes.get_last_error())  # type: ignore
 
 
-def AssignProcessToJobObject(
-    job: ctypes.wintypes.HANDLE, process: ctypes.wintypes.HANDLE
-) -> None:
+def AssignProcessToJobObject(job: ctypes.wintypes.HANDLE,
+                             process: ctypes.wintypes.HANDLE) -> None:
     fn_AssignProcessToJobObject = _LOADER.kernel32.AssignProcessToJobObject
     fn_AssignProcessToJobObject.restype = ctypes.wintypes.BOOL
-    fn_AssignProcessToJobObject.argtypes = [
-        ctypes.wintypes.HANDLE,
-        ctypes.wintypes.HANDLE,
-    ]
+    fn_AssignProcessToJobObject.argtypes = [ctypes.wintypes.HANDLE, ctypes.wintypes.HANDLE]
     if not fn_AssignProcessToJobObject(job, process):
         raise ctypes.WinError(ctypes.get_last_error())  # type: ignore
 
@@ -121,8 +114,7 @@
 def GetCurrentProcess() -> ctypes.wintypes.HANDLE:
     fn_GetCurrentProcess = _LOADER.kernel32.GetCurrentProcess
     fn_GetCurrentProcess.restype = ctypes.wintypes.HANDLE
-    handle: ctypes.wintypes.HANDLE = fn_GetCurrentProcess()
-    return handle
+    return fn_GetCurrentProcess()
 
 
 def CloseHandle(handle: ctypes.wintypes.HANDLE) -> None:
diff --git a/ndk/workqueue.py b/ndk/workqueue.py
index c6ea916..d96e522 100644
--- a/ndk/workqueue.py
+++ b/ndk/workqueue.py
@@ -14,42 +14,35 @@
 # limitations under the License.
 #
 """Defines WorkQueue for delegating asynchronous work to subprocesses."""
-from __future__ import annotations
+from __future__ import absolute_import
 
+import ctypes  # pylint: disable=unused-import
 import collections
+import itertools
 import logging
 import multiprocessing
 import multiprocessing.managers
 import os
+from queue import Queue
 import signal
 import sys
 import traceback
-from abc import ABC, abstractmethod
-from collections.abc import Hashable
-from queue import Queue
 from types import FrameType
 from typing import (
     Any,
     Callable,
-    Concatenate,
     Deque,
     Dict,
-    Generic,
     Iterable,
     List,
+    Mapping,
     Optional,
-    ParamSpec,
-    TypeVar,
+    Tuple,
     Union,
 )
 
-IS_WINDOWS = sys.platform == "win32"
 
-
-if IS_WINDOWS:
-    import ctypes.wintypes
-
-    ProcessGroup = Optional[ctypes.wintypes.HANDLE]
+ProcessGroup = Optional['ctypes.wintypes.HANDLE']
 
 
 def logger() -> logging.Logger:
@@ -57,7 +50,7 @@
     return logging.getLogger(__name__)
 
 
-def worker_sigterm_handler(_signum: int, _frame: Optional[FrameType]) -> None:
+def worker_sigterm_handler(_signum: int, _frame: FrameType) -> None:
     """Raises SystemExit so atexit/finally handlers can be executed."""
     sys.exit()
 
@@ -76,36 +69,32 @@
 
 def create_windows_process_group() -> ProcessGroup:
     """Creates a Windows process group for this process."""
-    import ndk.win32  # pylint: disable=import-outside-toplevel
-
+    import ndk.win32
     job = ndk.win32.CreateJobObject()
 
     limit_info = ndk.win32.JOBOBJECT_EXTENDED_LIMIT_INFORMATION(
         BasicLimitInformation=ndk.win32.JOBOBJECT_BASIC_LIMIT_INFORMATION(
-            LimitFlags=ndk.win32.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
-        )
-    )
+            LimitFlags=ndk.win32.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE))
 
     ndk.win32.SetInformationJobObject(
-        job, ndk.win32.JobObjectExtendedLimitInformation, limit_info
-    )
+        job, ndk.win32.JobObjectExtendedLimitInformation, limit_info)
     ndk.win32.AssignProcessToJobObject(job, ndk.win32.GetCurrentProcess())
     return job
 
 
 def assign_self_to_new_process_group() -> ProcessGroup:
     """Assigns this process to a new process group."""
-    if sys.platform == "win32":
+    if sys.platform == 'win32':
         return create_windows_process_group()
-    os.setpgrp()
-    return None
+    else:
+        os.setpgrp()
+        return None
 
 
 def kill_process_group(group: ProcessGroup) -> None:
     """Kills the process group."""
-    if sys.platform == "win32":
-        import ndk.win32  # pylint: disable=import-outside-toplevel
-
+    if sys.platform == 'win32':
+        import ndk.win32
         ndk.win32.CloseHandle(group)
     else:
         os.kill(0, signal.SIGTERM)
@@ -114,16 +103,11 @@
 class Worker:
     """A workqueue task executor."""
 
-    IDLE_STATUS = "IDLE"
-    EXCEPTION_STATUS = "EXCEPTION"
+    IDLE_STATUS = 'IDLE'
+    EXCEPTION_STATUS = 'EXCEPTION'
 
-    def __init__(
-        self,
-        data: Any,
-        task_queue: Queue[Task],
-        result_queue: Queue[Any],
-        manager: multiprocessing.managers.SyncManager,
-    ) -> None:
+    def __init__(self, data: Any, task_queue: Queue, result_queue: Queue,
+                 manager: multiprocessing.managers.SyncManager) -> None:
         """Creates a Worker object.
 
         Args:
@@ -135,7 +119,7 @@
         self.result_queue = result_queue
         # For multiprocess.Manager.Value, the type is actually ignored.
         # https://stackoverflow.com/a/21290961/632035
-        self._status = manager.Value("", self.IDLE_STATUS)
+        self._status = manager.Value('', self.IDLE_STATUS)
         self._status_lock = manager.Lock()
         self.process = multiprocessing.Process(target=self.main)
 
@@ -146,7 +130,7 @@
             # Typeshed has a seemingly incorrect definition of
             # SyncManager.Value that just returns the wrapped type rather than
             # the proxy type.
-            return self._status.value
+            return self._status.value  # type: ignore
 
     @status.setter
     def status(self, value: str) -> None:
@@ -155,7 +139,7 @@
             # Typeshed has a seemingly incorrect definition of
             # SyncManager.Value that just returns the wrapped type rather than
             # the proxy type.
-            self._status.value = value
+            self._status.value = value  # type: ignore
 
     def put_result(self, result: Any, status: str) -> None:
         """Puts a result onto the result queue."""
@@ -163,7 +147,7 @@
             # Typeshed has a seemingly incorrect definition of
             # SyncManager.Value that just returns the wrapped type rather than
             # the proxy type.
-            self._status.value = status
+            self._status.value = status  # type: ignore
         self.result_queue.put(result)
 
     @property
@@ -193,40 +177,32 @@
         signal.signal(signal.SIGTERM, worker_sigterm_handler)
         try:
             while True:
-                logger().debug("worker %d waiting for work", os.getpid())
+                logger().debug('worker %d waiting for work', os.getpid())
                 task = self.task_queue.get()
-                logger().debug("worker %d running task", os.getpid())
+                logger().debug('worker %d running task', os.getpid())
                 result = task.run(self)
-                logger().debug("worker %d putting result", os.getpid())
+                logger().debug('worker %d putting result', os.getpid())
                 self.put_result(result, self.IDLE_STATUS)
         except SystemExit:
             pass
         except:  # pylint: disable=bare-except
-            logger().debug("worker %d raised exception", os.getpid())
-            trace = "".join(traceback.format_exception(*sys.exc_info()))
+            logger().debug('worker %d raised exception', os.getpid())
+            trace = ''.join(traceback.format_exception(*sys.exc_info()))
             self.put_result(TaskError(trace), self.EXCEPTION_STATUS)
         finally:
             # multiprocessing.Process.terminate() doesn't kill our descendents.
             signal.signal(signal.SIGTERM, signal.SIG_IGN)
-            logger().debug("worker %d killing process group", os.getpid())
+            logger().debug('worker %d killing process group', os.getpid())
             kill_process_group(group)
             signal.signal(signal.SIGTERM, signal.SIG_DFL)
-        logger().debug("worker %d exiting", os.getpid())
-
-
-ResultT = TypeVar("ResultT")
-ParamT = ParamSpec("ParamT")
+        logger().debug('worker %d exiting', os.getpid())
 
 
 class Task:
     """A task to be executed by a worker process."""
 
-    def __init__(
-        self,
-        func: Callable[Concatenate[Worker, ParamT], ResultT],
-        *args: ParamT.args,
-        **kwargs: ParamT.kwargs,
-    ) -> None:
+    def __init__(self, func: Callable[..., Any], args: Tuple,
+                 kwargs: Mapping[Any, Any]) -> None:
         """Creates a task.
 
         Args:
@@ -248,13 +224,11 @@
 
     join_timeout = 8  # Timeout for join before trying SIGKILL.
 
-    def __init__(
-        self,
-        num_workers: int = multiprocessing.cpu_count(),
-        task_queue: Optional[Queue[Task]] = None,
-        result_queue: Optional[Queue[Any]] = None,
-        worker_data: Optional[Any] = None,
-    ) -> None:
+    def __init__(self,
+                 num_workers: int = multiprocessing.cpu_count(),
+                 task_queue: Optional[Queue] = None,
+                 result_queue: Optional[Queue] = None,
+                 worker_data: Optional[Any] = None) -> None:
         """Creates a WorkQueue.
 
         Worker threads are spawned immediately and remain live until both
@@ -295,12 +269,8 @@
         self.num_tasks = 0
         self._spawn_workers(num_workers)
 
-    def add_task(
-        self,
-        func: Callable[Concatenate[Worker, ParamT], ResultT],
-        *args: ParamT.args,
-        **kwargs: ParamT.kwargs,
-    ) -> None:
+    def add_task(self, func: Callable[..., Any], *args: Any,
+                 **kwargs: Any) -> None:
         """Queues up a new task for execution.
 
         Tasks are executed in order of insertion as worker processes become
@@ -311,7 +281,7 @@
             args: Arguments to be passed to the task.
             kwargs: Keyword arguments to be passed to the task.
         """
-        self.task_queue.put(Task(func, *args, **kwargs))
+        self.task_queue.put(Task(func, args, kwargs))
         self.num_tasks += 1
 
     def get_result(self) -> Any:
@@ -322,40 +292,20 @@
         self.num_tasks -= 1
         return result
 
-    @property
-    def has_pending_results(self) -> bool:
-        return not self.result_queue.empty()
-
-    # TODO: Make ProcessPoolWorkQueue a descendant of BaseWorkQueue and dedup.
-    # We can't trivially make this change because ProcessPoolWorkQueue isn't currently
-    # type safe for its task result, and a single instance may be used for multiple
-    # result types.
-    def get_results(self) -> list[ResultT]:
-        """Gets all pending results.
-
-        If no results are available, this will block until at least one is
-        available. It will then continue dequeing until the queue is empty, and
-        then return.
-        """
-        results: list[ResultT] = []
-        results.append(self.get_result())
-        while self.has_pending_results:
-            results.append(self.get_result())
-        return results
-
     def terminate(self) -> None:
         """Terminates all worker processes."""
         for worker in self.workers:
-            logger().debug("terminating %d", worker.pid)
+            logger().debug('terminating %d', worker.pid)
             worker.terminate()
 
     def join(self) -> None:
         """Waits for all worker processes to exit."""
         for worker in self.workers:
-            logger().debug("joining %d", worker.pid)
+            logger().debug('joining %d', worker.pid)
             worker.join(self.join_timeout)
             if worker.is_alive():
-                logger().error("worker %d will not die; sending SIGKILL", worker.pid)
+                logger().error(
+                    'worker %d will not die; sending SIGKILL', worker.pid)
                 if worker.pid is not None:
                     os.killpg(worker.pid, signal.SIGKILL)
                 worker.join()
@@ -373,69 +323,37 @@
         """
         for _ in range(num_workers):
             worker = Worker(
-                self.worker_data, self.task_queue, self.result_queue, self.manager
-            )
+                self.worker_data, self.task_queue, self.result_queue,
+                self.manager)
             worker.start()
             self.workers.append(worker)
 
 
-class BasicWorker:
-    """A worker for a BasicWorkQueue."""
-
+class DummyWorker:
+    """A worker for a dummy workqueue."""
     def __init__(self, data: Any) -> None:
         self.data = data
 
 
-class BaseWorkQueue(ABC, Generic[ResultT]):
-    @abstractmethod
-    def get_result(self) -> ResultT: ...
+class DummyWorkQueue:
+    """A fake WorkQueue that does not parallelize.
 
-    @property
-    @abstractmethod
-    def has_pending_results(self) -> bool: ...
-
-    def get_results(self) -> list[ResultT]:
-        """Gets all pending results.
-
-        If no results are available, this will block until at least one is
-        available. It will then continue dequeing until the queue is empty, and
-        then return.
-        """
-        results: list[ResultT] = []
-        results.append(self.get_result())
-        while self.has_pending_results:
-            results.append(self.get_result())
-        return results
-
-
-class BasicWorkQueue(BaseWorkQueue[ResultT]):
-    """A WorkQueue that does not delegate.
-
-    This is the simplest possible implementation of a workqueue, performing all
-    the work on the same thread. Useful for debugging when trying to determine
-    if an issue is being caused by synchronization or IPC issues.
+    Useful for debugging when trying to determine if an issue is being caused
+    by multiprocess specific behavior.
     """
-
     # pylint: disable=unused-argument
-    def __init__(
-        self,
-        num_workers: Optional[int] = None,
-        task_queue: Optional[Queue[Task]] = None,
-        result_queue: Optional[Queue[ResultT]] = None,
-        worker_data: Optional[Any] = None,
-    ) -> None:
+    def __init__(self,
+                 num_workers: int = None,
+                 task_queue: Optional[Queue] = None,
+                 result_queue: Optional[Queue] = None,
+                 worker_data: Optional[Any] = None) -> None:
         """Creates a SerialWorkQueue."""
-        self.task_queue: Deque[Task] = collections.deque()
+        self.task_queue: Deque = collections.deque()
         self.worker_data = worker_data
-
     # pylint: enable=unused-argument
 
-    def add_task(
-        self,
-        func: Callable[Concatenate[Worker, ParamT], ResultT],
-        *args: ParamT.args,
-        **kwargs: ParamT.kwargs,
-    ) -> None:
+    def add_task(self, func: Callable[..., Any], *args: Any,
+                 **kwargs: Any) -> None:
         """Queues up a new task for execution.
 
         Tasks are executed when get_result is called.
@@ -445,16 +363,16 @@
             args: Arguments to be passed to the task.
             kwargs: Keyword arguments to be passed to the task.
         """
-        self.task_queue.append(Task(func, *args, **kwargs))
+        self.task_queue.append(Task(func, args, kwargs))
 
     def get_result(self) -> Any:
         """Executes a task and returns the result."""
         task = self.task_queue.popleft()
         try:
-            return task.run(BasicWorker(self.worker_data))
-        except Exception as ex:
-            trace = "".join(traceback.format_exception(*sys.exc_info()))
-            raise TaskError(trace) from ex
+            return task.run(DummyWorker(self.worker_data))
+        except:
+            trace = ''.join(traceback.format_exception(*sys.exc_info()))
+            raise TaskError(trace)
 
     def terminate(self) -> None:
         """Does nothing."""
@@ -468,10 +386,6 @@
         return len(self.task_queue)
 
     @property
-    def has_pending_results(self) -> bool:
-        return False
-
-    @property
     def workers(self) -> List[Worker]:
         """List of workers."""
         return []
@@ -481,24 +395,85 @@
         return self.num_tasks == 0
 
 
-ShardT = TypeVar("ShardT")
+class LoadRestrictingWorkQueue:
+    """Specialized work queue for building tests.
 
+    Building the libc++ tests is very demanding and we should not be running
+    more than one libc++ build at a time. The LoadRestrictingWorkQueue has a
+    normal task queue as well as a task queue served by only one worker.
+    """
 
-class ShardingGroup(Hashable, Generic[ShardT]):
+    def __init__(self, num_workers: int = multiprocessing.cpu_count()) -> None:
+        self.manager = multiprocessing.Manager()
+        self.result_queue = self.manager.Queue()
+
+        assert num_workers >= 2
+
+        self.main_task_queue = self.manager.Queue()
+        self.restricted_task_queue = self.manager.Queue()
+
+        self.main_work_queue = WorkQueue(
+            num_workers - 1, task_queue=self.main_task_queue,
+            result_queue=self.result_queue)
+
+        self.restricted_work_queue = WorkQueue(
+            1, task_queue=self.restricted_task_queue,
+            result_queue=self.result_queue)
+
+        self.num_tasks = 0
+
+    def add_task(self, func: Callable[..., Any], *args: Any,
+                 **kwargs: Any) -> None:
+        self.main_task_queue.put(Task(func, args, kwargs))
+        self.num_tasks += 1
+
+    def add_load_restricted_task(self, func: Callable[..., Any], *args: Any,
+                                 **kwargs: Any) -> None:
+        self.restricted_task_queue.put(Task(func, args, kwargs))
+        self.num_tasks += 1
+
+    def get_result(self) -> Any:
+        """Gets a result from the queue, blocking until one is available."""
+        result = self.result_queue.get()
+        if isinstance(result, TaskError):
+            raise result
+        self.num_tasks -= 1
+        return result
+
+    def terminate(self) -> None:
+        self.main_work_queue.terminate()
+        self.restricted_work_queue.terminate()
+
+    def join(self) -> None:
+        self.main_work_queue.join()
+        self.restricted_work_queue.join()
+
     @property
-    def shards(self) -> list[ShardT]:
+    def workers(self) -> List[Worker]:
+        """List of workers."""
+        return list(
+            itertools.chain(self.main_work_queue.workers,
+                            self.restricted_work_queue.workers))
+
+    def finished(self) -> bool:
+        """Returns True if all tasks have completed execution."""
+        return self.num_tasks == 0
+
+
+class ShardingGroup:
+    @property
+    def shards(self) -> List[Any]:
         raise NotImplementedError
 
 
-class ShardingWorkQueue(BaseWorkQueue[ResultT], Generic[ResultT, ShardT]):
-    def __init__(
-        self, device_groups: Iterable[ShardingGroup[ShardT]], procs_per_device: int
-    ) -> None:
+class ShardingWorkQueue:
+    def __init__(self, device_groups: Iterable[ShardingGroup],
+                 procs_per_device: int) -> None:
         self.manager = multiprocessing.Manager()
         self.result_queue = self.manager.Queue()
-        self.task_queues: Dict[ShardingGroup[ShardT], Queue[Task]] = {}
+        self.task_queues: Dict[ShardingGroup, Queue] = {}
 
-        self.work_queues: Dict[ShardingGroup[ShardT], Dict[Any, WorkQueue]] = {}
+        self.work_queues: Dict[ShardingGroup, Dict[Any, WorkQueue]] = {}
         self.num_tasks = 0
         for group in device_groups:
             self.work_queues[group] = {}
@@ -508,17 +483,12 @@
                     procs_per_device,
                     task_queue=self.task_queues[group],
                     result_queue=self.result_queue,
-                    worker_data=[shard],
-                )
+                    worker_data=[shard])
 
-    def add_task(
-        self,
-        group: ShardingGroup[ShardT],
-        func: Callable[Concatenate[Worker, ParamT], ResultT],
-        *args: ParamT.args,
-        **kwargs: ParamT.kwargs,
-    ) -> None:
-        self.task_queues[group].put(Task(func, *args, **kwargs))
+    def add_task(self, group: ShardingGroup, func: Callable[..., Any],
+                 *args: Any, **kwargs: Any) -> None:
+        self.task_queues[group].put(
+            Task(func, args, kwargs))
         self.num_tasks += 1
 
     def get_result(self) -> Any:
@@ -539,14 +509,10 @@
             for work_queue in group_queues.values():
                 work_queue.join()
 
-    @property
-    def has_pending_results(self) -> bool:
-        return not self.result_queue.empty()
-
     def finished(self) -> bool:
         """Returns True if all tasks have completed execution."""
         return self.num_tasks == 0
 
 
 WorkQueue = ProcessPoolWorkQueue
-AnyWorkQueue = Union[BasicWorkQueue[Any], ProcessPoolWorkQueue]
+AnyWorkQueue = Union[DummyWorkQueue, LoadRestrictingWorkQueue, ProcessPoolWorkQueue]
diff --git a/ndkgdb.py b/ndkgdb.py
deleted file mode 100755
index 5e78f16..0000000
--- a/ndkgdb.py
+++ /dev/null
@@ -1,998 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2015 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from __future__ import print_function
-
-import argparse
-import contextlib
-import logging
-import os
-import posixpath
-import signal
-import subprocess
-import sys
-import textwrap
-import time
-from collections.abc import Iterator
-from typing import NoReturn
-from xml.etree import ElementTree
-from xml.etree.ElementTree import Element
-
-import adb
-import gdbrunner
-
-NDK_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../.."))
-
-
-def log(msg: str) -> None:
-    logger = logging.getLogger(__name__)
-    logger.info(msg)
-
-
-def enable_verbose_logging() -> None:
-    logger = logging.getLogger(__name__)
-    handler = logging.StreamHandler(sys.stdout)
-    formatter = logging.Formatter()
-
-    handler.setFormatter(formatter)
-    logger.addHandler(handler)
-    logger.propagate = False
-
-    logger.setLevel(logging.INFO)
-
-
-def error(msg: str) -> NoReturn:
-    sys.exit("ERROR: {}".format(msg))
-
-
-class ArgumentParser(gdbrunner.ArgumentParser):
-    def __init__(self) -> None:
-        super().__init__()
-        self.add_argument(
-            "--verbose", "-v", action="store_true", help="enable verbose mode"
-        )
-
-        self.add_argument(
-            "--force",
-            "-f",
-            action="store_true",
-            help="kill existing debug session if it exists",
-        )
-
-        self.add_argument(
-            "--port",
-            type=int,
-            nargs="?",
-            default="5039",
-            help="override the port used on the host.",
-        )
-
-        self.add_argument(
-            "--delay",
-            type=float,
-            default=0.25,
-            help="delay in seconds to wait after starting activity.\n"
-            "defaults to 0.25, higher values may be needed on slower devices.",
-        )
-
-        self.add_argument(
-            "-p", "--project", dest="project", help="specify application project path"
-        )
-
-        lldb_group = self.add_mutually_exclusive_group()
-        lldb_group.add_argument("--lldb", action="store_true", help="Use lldb.")
-        lldb_group.add_argument(
-            "--no-lldb", action="store_true", help="Do not use lldb."
-        )
-
-        app_group = self.add_argument_group("target selection")
-        start_group = app_group.add_mutually_exclusive_group()
-
-        start_group.add_argument(
-            "--attach",
-            nargs="?",
-            dest="package_name",
-            metavar="PKG_NAME",
-            help="attach to application (default)\n"
-            "autodetects PKG_NAME if not specified",
-        )
-
-        # NB: args.launch can be False (--attach), None (--launch), or a string
-        start_group.add_argument(
-            "--launch",
-            nargs="?",
-            dest="launch",
-            default=False,
-            metavar="ACTIVITY",
-            help="launch application activity\n"
-            "launches main activity if ACTIVITY not specified",
-        )
-
-        start_group.add_argument(
-            "--launch-list",
-            action="store_true",
-            help="list all launchable activity names from manifest",
-        )
-
-        debug_group = self.add_argument_group("debugging options")
-        debug_group.add_argument(
-            "-x",
-            "--exec",
-            dest="exec_file",
-            help="execute gdb commands in EXEC_FILE after connection",
-        )
-
-        debug_group.add_argument(
-            "--nowait",
-            action="store_true",
-            help="do not wait for debugger to attach (may miss early JNI "
-            "breakpoints)",
-        )
-
-        if sys.platform.startswith("win"):
-            tui_help = argparse.SUPPRESS
-        else:
-            tui_help = "use GDB's tui mode"
-
-        debug_group.add_argument(
-            "-t", "--tui", action="store_true", dest="tui", help=tui_help
-        )
-
-
-def extract_package_name(xmlroot: Element) -> str:
-    if "package" in xmlroot.attrib:
-        return xmlroot.attrib["package"]
-    error("Failed to find package name in AndroidManifest.xml")
-
-
-ANDROID_XMLNS = "{http://schemas.android.com/apk/res/android}"
-
-
-def extract_launchable(xmlroot: Element) -> list[str]:
-    """
-    A given application can have several activities, and each activity
-    can have several intent filters. We want to only list, in the final
-    output, the activities which have a intent-filter that contains the
-    following elements:
-
-      <action android:name="android.intent.action.MAIN" />
-      <category android:name="android.intent.category.LAUNCHER" />
-    """
-    launchable_activities = []
-    application = xmlroot.findall("application")[0]
-
-    main_action = "android.intent.action.MAIN"
-    launcher_category = "android.intent.category.LAUNCHER"
-    name_attrib = "{}name".format(ANDROID_XMLNS)
-
-    # pylint: disable=too-many-nested-blocks
-    for activity in application.iter("activity"):
-        if name_attrib not in activity.attrib:
-            continue
-
-        for intent_filter in activity.iter("intent-filter"):
-            found_action = False
-            found_category = False
-            for child in intent_filter:
-                if child.tag == "action":
-                    if not found_action and name_attrib in child.attrib:
-                        if child.attrib[name_attrib] == main_action:
-                            found_action = True
-                if child.tag == "category":
-                    if not found_category and name_attrib in child.attrib:
-                        if child.attrib[name_attrib] == launcher_category:
-                            found_category = True
-            if found_action and found_category:
-                launchable_activities.append(activity.attrib[name_attrib])
-    return launchable_activities
-
-
-def ndk_bin_path() -> str:
-    return os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-
-
-def handle_args() -> argparse.Namespace:
-    def find_program(program: str, paths: list[str]) -> str | None:
-        """Find a binary in paths"""
-        exts = [""]
-        if sys.platform.startswith("win"):
-            exts += [".exe", ".bat", ".cmd"]
-        for path in paths:
-            if os.path.isdir(path):
-                for ext in exts:
-                    full = path + os.sep + program + ext
-                    if os.path.isfile(full):
-                        return full
-        return None
-
-    # FIXME: This is broken for PATH that contains quoted colons.
-    paths = os.environ["PATH"].replace('"', "").split(os.pathsep)
-
-    args: argparse.Namespace = ArgumentParser().parse_args()
-
-    if args.tui and sys.platform.startswith("win"):
-        error("TUI is unsupported on Windows.")
-
-    ndk_bin = ndk_bin_path()
-    args.make_cmd = find_program("make", [ndk_bin])
-    args.jdb_cmd = find_program("jdb", paths)
-    if args.make_cmd is None:
-        error("Failed to find make in '{}'".format(ndk_bin))
-    if args.jdb_cmd is None:
-        print("WARNING: Failed to find jdb on your path, defaulting to --nowait")
-        args.nowait = True
-
-    if args.verbose:
-        enable_verbose_logging()
-
-    return args
-
-
-def find_project(args: argparse.Namespace) -> str:
-    manifest_name = "AndroidManifest.xml"
-    project: str | None = args.project
-    if project is not None:
-        log("Using project directory: {}".format(args.project))
-        args.project = os.path.realpath(os.path.expanduser(args.project))
-        if not os.path.exists(os.path.join(args.project, manifest_name)):
-            msg = "could not find AndroidManifest.xml in '{}'"
-            error(msg.format(args.project))
-    else:
-        # Walk upwards until we find AndroidManifest.xml, or run out of path.
-        current_dir = os.getcwd()
-        while not os.path.exists(os.path.join(current_dir, manifest_name)):
-            parent_dir = os.path.dirname(current_dir)
-            if parent_dir == current_dir:
-                error(
-                    "Could not find AndroidManifest.xml in current"
-                    " directory or a parent directory.\n"
-                    "       Launch this script from inside a project, or"
-                    " use --project=<path>."
-                )
-            current_dir = parent_dir
-        args.project = current_dir
-        log("Using project directory: {} ".format(args.project))
-    assert project is not None
-    args.manifest_path = os.path.join(project, manifest_name)
-    args.project = project
-    return project
-
-
-def canonicalize_activity(package_name: str, activity_name: str) -> str:
-    if activity_name.startswith("."):
-        return "{}{}".format(package_name, activity_name)
-    return activity_name
-
-
-def parse_manifest(args: argparse.Namespace) -> None:
-    manifest = ElementTree.parse(args.manifest_path)
-    manifest_root = manifest.getroot()
-    package_name = extract_package_name(manifest_root)
-    log("Found package name: {}".format(package_name))
-
-    activities = extract_launchable(manifest_root)
-    activities = [canonicalize_activity(package_name, a) for a in activities]
-
-    if args.launch_list:
-        print("Launchable activities: {}".format(", ".join(activities)))
-        sys.exit(0)
-
-    args.activities = activities
-    args.package_name = package_name
-
-
-def select_target(args: argparse.Namespace) -> str:
-    assert args.launch
-
-    if len(args.activities) == 0:
-        error("No launchable activities found.")
-
-    target: str
-    if args.launch is None:
-        target = args.activities[0]
-
-        if len(args.activities) > 1:
-            print(
-                "WARNING: Multiple launchable activities found, choosing"
-                " '{}'.".format(args.activities[0])
-            )
-    else:
-        activity_name = canonicalize_activity(args.package_name, args.launch)
-
-        if activity_name not in args.activities:
-            msg = "Could not find launchable activity: '{}'."
-            error(msg.format(activity_name))
-        target = activity_name
-    return target
-
-
-@contextlib.contextmanager
-def cd(path: str) -> Iterator[None]:
-    curdir = os.getcwd()
-    os.chdir(path)
-    os.environ["PWD"] = path
-    try:
-        yield
-    finally:
-        os.environ["PWD"] = curdir
-        os.chdir(curdir)
-
-
-def dump_var(args: argparse.Namespace, variable: str, abi: str | None = None) -> str:
-    make_args = [
-        args.make_cmd,
-        "--no-print-dir",
-        "-f",
-        os.path.join(NDK_PATH, "build/core/build-local.mk"),
-        "-C",
-        args.project,
-        "DUMP_{}".format(variable),
-    ]
-
-    if abi is not None:
-        make_args.append("APP_ABI={}".format(abi))
-
-    with cd(args.project):
-        try:
-            make_output = subprocess.check_output(make_args, cwd=args.project)
-        except subprocess.CalledProcessError:
-            error("Failed to retrieve application ABI from Android.mk.")
-    return make_output.splitlines()[-1].decode()
-
-
-def get_api_level(device: adb.AndroidDevice) -> int:
-    # Check the device API level
-    try:
-        api_str = device.get_prop("ro.build.version.sdk")
-        if api_str is None:
-            raise KeyError
-        api_level = int()
-    except (ValueError, KeyError):
-        error(
-            "Failed to find target device's supported API level.\n"
-            "ndk-gdb only supports devices running Android 2.2 or higher."
-        )
-    if api_level < 8:
-        error(
-            "ndk-gdb only supports devices running Android 2.2 or higher.\n"
-            "(expected API level 8, actual: {})".format(api_level)
-        )
-
-    return api_level
-
-
-def fetch_abi(args: argparse.Namespace) -> str:
-    """
-    Figure out the intersection of which ABIs the application is built for and
-    which ones the device supports, then pick the one preferred by the device,
-    so that we know which gdbserver to push and run on the device.
-    """
-
-    app_abis = dump_var(args, "APP_ABI").split(" ")
-    if "all" in app_abis:
-        app_abis = dump_var(args, "NDK_ALL_ABIS").split(" ")
-    app_abis_msg = "Application ABIs: {}".format(", ".join(app_abis))
-    log(app_abis_msg)
-
-    new_abi_props = ["ro.product.cpu.abilist"]
-    old_abi_props = ["ro.product.cpu.abi", "ro.product.cpu.abi2"]
-    abi_props = new_abi_props
-    if args.device.get_prop("ro.product.cpu.abilist") is None:
-        abi_props = old_abi_props
-
-    device_abis: list[str] = []
-    for key in abi_props:
-        value = args.device.get_prop(key)
-        if value is not None:
-            device_abis.extend(value.split(","))
-
-    device_abis_msg = "Device ABIs: {}".format(", ".join(device_abis))
-    log(device_abis_msg)
-
-    for abi in device_abis:
-        if abi in app_abis:
-            # TODO(jmgao): Do we expect gdb to work with ARM-x86 translation?
-            log("Selecting ABI: {}".format(abi))
-            return abi
-
-    msg = "Application cannot run on the selected device."
-
-    # Don't repeat ourselves.
-    if not args.verbose:
-        msg += "\n{}\n{}".format(app_abis_msg, device_abis_msg)
-
-    error(msg)
-
-
-def get_run_as_cmd(user: str, cmd: list[str]) -> list[str]:
-    return ["run-as", user] + cmd
-
-
-def get_app_data_dir(args: argparse.Namespace, package_name: str) -> str:
-    cmd = ["/system/bin/sh", "-c", "pwd", "2>/dev/null"]
-    cmd = get_run_as_cmd(package_name, cmd)
-    device: adb.AndroidDevice = args.device
-    (rc, stdout, _) = device.shell_nocheck(cmd)
-    if rc != 0:
-        error(
-            "Could not find application's data directory. Are you sure that "
-            "the application is installed and debuggable?"
-        )
-    data_dir = stdout.strip()
-
-    # Applications with minSdkVersion >= 24 will have their data directories
-    # created with rwx------ permissions, preventing adbd from forwarding to
-    # the gdbserver socket. To be safe, if we're on a device >= 24, always
-    # chmod the directory.
-    if get_api_level(args.device) >= 24:
-        chmod_cmd = ["/system/bin/chmod", "a+x", data_dir]
-        chmod_cmd = get_run_as_cmd(package_name, chmod_cmd)
-        (rc, _, _) = args.device.shell_nocheck(chmod_cmd)
-        if rc != 0:
-            error("Failed to make application data directory world executable")
-
-    log("Found application data directory: {}".format(data_dir))
-    return data_dir
-
-
-def abi_to_arch(abi: str) -> str:
-    if abi.startswith("armeabi"):
-        return "arm"
-    if abi == "arm64-v8a":
-        return "arm64"
-    return abi
-
-
-def abi_to_llvm_arch(abi: str) -> str:
-    if abi.startswith("armeabi"):
-        return "arm"
-    if abi == "arm64-v8a":
-        return "aarch64"
-    if abi == "x86":
-        return "i386"
-    return "x86_64"
-
-
-def get_llvm_host_name() -> str:
-    platform = sys.platform
-    if platform.startswith("win"):
-        return "windows-x86_64"
-    if platform.startswith("darwin"):
-        return "darwin-x86_64"
-    return "linux-x86_64"
-
-
-def get_python_executable(toolchain_path: str) -> str:
-    if sys.platform.startswith("win"):
-        return os.path.join(toolchain_path, "python3", "python.exe")
-    return os.path.join(toolchain_path, "python3", "bin", "python3")
-
-
-def get_lldb_path(toolchain_path: str) -> str | None:
-    for lldb_name in ["lldb.sh", "lldb.cmd", "lldb", "lldb.exe"]:
-        debugger_path = os.path.join(toolchain_path, "bin", lldb_name)
-        if os.path.isfile(debugger_path):
-            return debugger_path
-    return None
-
-
-def get_llvm_package_version(llvm_toolchain_dir: str) -> str:
-    version_file_path = os.path.join(llvm_toolchain_dir, "AndroidVersion.txt")
-    try:
-        version_file = open(version_file_path, "r", encoding="utf-8")
-    except IOError:
-        error(
-            "Failed to open llvm package version file: '{}'.".format(version_file_path)
-        )
-
-    with version_file:
-        return version_file.readline().strip()
-
-
-def get_debugger_server_path(
-    args: argparse.Namespace,
-    package_name: str,
-    app_data_dir: str,
-    arch: str,
-    server_name: str,
-    local_path: str,
-) -> str:
-    app_debugger_server_path = "{}/lib/{}".format(app_data_dir, server_name)
-    cmd = ["ls", app_debugger_server_path, "2>/dev/null"]
-    cmd = get_run_as_cmd(package_name, cmd)
-    (rc, _, _) = args.device.shell_nocheck(cmd)
-    if rc == 0:
-        log("Found app {}: {}".format(server_name, app_debugger_server_path))
-        return app_debugger_server_path
-
-    # We need to upload our debugger server
-    log(
-        "App {} not found at {}, uploading.".format(
-            server_name, app_debugger_server_path
-        )
-    )
-    remote_path = "/data/local/tmp/{}-{}".format(arch, server_name)
-    args.device.push(local_path, remote_path)
-
-    # Copy debugger server into the data directory on M+, because selinux prevents
-    # execution of binaries directly from /data/local/tmp.
-    if get_api_level(args.device) >= 23:
-        destination = "{}/{}-{}".format(app_data_dir, arch, server_name)
-        log("Copying {} to {}.".format(server_name, destination))
-        cmd = [
-            "cat",
-            remote_path,
-            "|",
-            "run-as",
-            package_name,
-            "sh",
-            "-c",
-            "'cat > {}'".format(destination),
-        ]
-        (rc, _, _) = args.device.shell_nocheck(cmd)
-        if rc != 0:
-            error("Failed to copy {} to {}.".format(server_name, destination))
-        (rc, _, _) = args.device.shell_nocheck(
-            ["run-as", package_name, "chmod", "700", destination]
-        )
-        if rc != 0:
-            error("Failed to chmod {} at {}.".format(server_name, destination))
-
-        remote_path = destination
-
-    log("Uploaded {} to {}".format(server_name, remote_path))
-    return remote_path
-
-
-def pull_binaries(device: adb.AndroidDevice, out_dir: str, app_64bit: bool) -> None:
-    required_files = []
-    libraries = ["libc.so", "libm.so", "libdl.so"]
-
-    if app_64bit:
-        required_files = ["/system/bin/app_process64", "/system/bin/linker64"]
-        library_path = "/system/lib64"
-    else:
-        required_files = ["/system/bin/linker"]
-        library_path = "/system/lib"
-
-    for library in libraries:
-        required_files.append(posixpath.join(library_path, library))
-
-    for required_file in required_files:
-        # os.path.join not used because joining absolute paths will pick the last one
-        local_path = os.path.realpath(out_dir + required_file)
-        local_dirname = os.path.dirname(local_path)
-        if not os.path.isdir(local_dirname):
-            os.makedirs(local_dirname)
-        log("Pulling '{}' to '{}'".format(required_file, local_path))
-        device.pull(required_file, local_path)
-
-    # /system/bin/app_process is 32-bit on 32-bit devices, but a symlink to
-    # app_process64 on 64-bit. If we need the 32-bit version, try to pull
-    # app_process32, and if that fails, pull app_process.
-    if not app_64bit:
-        destination = os.path.realpath(out_dir + "/system/bin/app_process")
-        try:
-            device.pull("/system/bin/app_process32", destination)
-        except subprocess.CalledProcessError:
-            device.pull("/system/bin/app_process", destination)
-
-
-def generate_lldb_script(
-    args: argparse.Namespace,
-    sysroot: str,
-    binary_path: str,
-    app_64bit: bool,
-    jdb_pid: int,
-    llvm_toolchain_dir: str,
-) -> str:
-    lldb_commands = []
-    solib_search_paths = [
-        "{}/system/bin".format(sysroot),
-        "{}/system/lib{}".format(sysroot, "64" if app_64bit else ""),
-    ]
-    lldb_commands.append(
-        "settings append target.exec-search-paths {}".format(
-            " ".join(solib_search_paths)
-        )
-    )
-
-    lldb_commands.append("target create '{}'".format(binary_path))
-    lldb_commands.append("target modules search-paths add / {}/".format(sysroot))
-
-    lldb_commands.append("gdb-remote {}".format(args.port))
-    if jdb_pid is not None:
-        # After we've interrupted the app, reinvoke ndk-gdb.py to start jdb and
-        # wake up the app.
-        lldb_commands.append(
-            """
-script
-def start_jdb_to_unblock_app():
-  import subprocess
-  subprocess.Popen({})
-
-start_jdb_to_unblock_app()
-exit()
-    """.format(
-                repr(
-                    [
-                        # We can't use sys.executable because it is the python2.
-                        # lldb wrapper will set PYTHONHOME to point to python3.
-                        get_python_executable(llvm_toolchain_dir),
-                        os.path.realpath(__file__),
-                        "--internal-wakeup-pid-with-jdb",
-                        args.device.adb_path,
-                        args.device.serial,
-                        args.jdb_cmd,
-                        str(jdb_pid),
-                        str(bool(args.verbose)),
-                    ]
-                )
-            )
-        )
-
-    if args.tui:
-        lldb_commands.append("gui")
-
-    if args.exec_file is not None:
-        try:
-            exec_file = open(args.exec_file, "r", encoding="utf-8")
-        except IOError:
-            error("Failed to open lldb exec file: '{}'.".format(args.exec_file))
-
-        with exec_file:
-            lldb_commands.append(exec_file.read())
-
-    return "\n".join(lldb_commands)
-
-
-def generate_gdb_script(
-    args: argparse.Namespace,
-    sysroot: str,
-    binary_path: str,
-    app_64bit: bool,
-    jdb_pid: int,
-    connect_timeout: int = 5,
-) -> str:
-    if sys.platform.startswith("win"):
-        # GDB expects paths to use forward slashes.
-        sysroot = sysroot.replace("\\", "/")
-        binary_path = binary_path.replace("\\", "/")
-
-    gdb_commands = "set osabi GNU/Linux\n"
-    gdb_commands += "file '{}'\n".format(binary_path)
-
-    solib_search_paths = [sysroot, "{}/system/bin".format(sysroot)]
-    if app_64bit:
-        solib_search_paths.append("{}/system/lib64".format(sysroot))
-    else:
-        solib_search_paths.append("{}/system/lib".format(sysroot))
-    solib_search_path = os.pathsep.join(solib_search_paths)
-    gdb_commands += "set solib-absolute-prefix {}\n".format(sysroot)
-    gdb_commands += "set solib-search-path {}\n".format(solib_search_path)
-
-    # Try to connect for a few seconds, sometimes the device gdbserver takes
-    # a little bit to come up, especially on emulators.
-    gdb_commands += """
-python
-
-def target_remote_with_retry(target, timeout_seconds):
-  import time
-  end_time = time.time() + timeout_seconds
-  while True:
-    try:
-      gdb.execute('target remote ' + target)
-      return True
-    except gdb.error as e:
-      time_left = end_time - time.time()
-      if time_left < 0 or time_left > timeout_seconds:
-        print("Error: unable to connect to device.")
-        print(e)
-        return False
-      time.sleep(min(0.25, time_left))
-
-target_remote_with_retry(':{}', {})
-
-end
-""".format(
-        args.port, connect_timeout
-    )
-
-    if jdb_pid is not None:
-        # After we've interrupted the app, reinvoke ndk-gdb.py to start jdb and
-        # wake up the app.
-        gdb_commands += """
-python
-def start_jdb_to_unblock_app():
-  import subprocess
-  subprocess.Popen({})
-start_jdb_to_unblock_app()
-end
-    """.format(
-            repr(
-                [
-                    sys.executable,
-                    os.path.realpath(__file__),
-                    "--internal-wakeup-pid-with-jdb",
-                    args.device.adb_path,
-                    args.device.serial,
-                    args.jdb_cmd,
-                    str(jdb_pid),
-                    str(bool(args.verbose)),
-                ]
-            )
-        )
-
-    if args.exec_file is not None:
-        try:
-            exec_file = open(args.exec_file, "r", encoding="utf-8")
-        except IOError:
-            error("Failed to open GDB exec file: '{}'.".format(args.exec_file))
-
-        with exec_file:
-            gdb_commands += exec_file.read()
-
-    return gdb_commands
-
-
-def start_jdb(argv_subset: list[str]) -> None:
-    adb_path, serial, jdb_cmd, pid_str, verbose = argv_subset
-    pid = int(pid_str)
-    device = adb.get_device(serial, adb_path=adb_path)
-    if verbose == "True":
-        enable_verbose_logging()
-
-    log("Starting jdb to unblock application.")
-
-    # Do setup stuff to keep ^C in the parent from killing us.
-    signal.signal(signal.SIGINT, signal.SIG_IGN)
-    windows = sys.platform.startswith("win")
-    if not windows:
-        os.setpgrp()
-
-    jdb_port = 65534
-    device.forward("tcp:{}".format(jdb_port), "jdwp:{}".format(pid))
-    jdb_args = [
-        jdb_cmd,
-        "-connect",
-        "com.sun.jdi.SocketAttach:hostname=localhost,port={}".format(jdb_port),
-    ]
-
-    if sys.platform == "win32":
-        flags = subprocess.CREATE_NEW_PROCESS_GROUP
-    else:
-        flags = 0
-    jdb = subprocess.Popen(
-        jdb_args,
-        stdin=subprocess.PIPE,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        creationflags=flags,
-        text=True,
-    )
-
-    assert jdb.stdin is not None
-    assert jdb.stdout is not None
-    # Wait until jdb can communicate with the app. Once it can, the app will
-    # start polling for a Java debugger (e.g. every 200ms). We need to wait
-    # a while longer then so that the app notices jdb.
-    jdb_magic = "__verify_jdb_has_started__"
-    jdb.stdin.write('print "{}"\n'.format(jdb_magic))
-    saw_magic_str = False
-    while True:
-        line = jdb.stdout.readline()
-        if line == "":
-            break
-        log("jdb output: " + line.rstrip())
-        if jdb_magic in line and not saw_magic_str:
-            saw_magic_str = True
-            time.sleep(0.3)
-            jdb.stdin.write("exit\n")
-    jdb.wait()
-    if saw_magic_str:
-        log("JDB finished unblocking application.")
-    else:
-        log("error: did not find magic string in JDB output.")
-
-
-def advise_apk_debugging() -> None:
-    print("**Android Studio's debugger can be used for non-Studio projects.**")
-    print("See https://developer.android.com/studio/debug/apk-debugger")
-    print()
-    print(
-        textwrap.dedent(
-            """\
-            ndk-lldb is still usable for debugging command line Android tools or
-            ANT-based app builds, but it was never meant to handle other use
-            cases. Android Studio can debug your APK even if Android Studio
-            wasn't used to build the project, and this will be *much* easier
-            than using ndk-lldb in most circumstances.
-            """
-        )
-    )
-
-
-def main() -> None:
-    if sys.argv[1:2] == ["--internal-wakeup-pid-with-jdb"]:
-        start_jdb(sys.argv[2:])
-        return
-
-    advise_apk_debugging()
-
-    args = handle_args()
-    device = args.device
-    use_lldb = not args.no_lldb
-
-    if not use_lldb:
-        print("WARNING: --no-lldb was used but GDB is no longer supported.")
-        print("GDB will be used, but will be removed in the next release.")
-
-    if device is None:
-        error("Could not find a unique connected device/emulator.")
-
-    # Warn on old Pixel C firmware (b/29381985). Newer devices may have Yama
-    # enabled but still work with ndk-gdb (b/19277529).
-    yama_check = device.shell_nocheck(
-        ["cat", "/proc/sys/kernel/yama/ptrace_scope", "2>/dev/null"]
-    )
-    if (
-        yama_check[0] == 0
-        and yama_check[1].rstrip() not in ["", "0"]
-        and (device.get_prop("ro.build.product"), device.get_prop("ro.product.name"))
-        == ("dragon", "ryu")
-    ):
-        print(
-            "WARNING: The device uses Yama ptrace_scope to restrict debugging. ndk-gdb will"
-        )
-        print(
-            "    likely be unable to attach to a process. With root access, the restriction"
-        )
-        print(
-            "    can be lifted by writing 0 to /proc/sys/kernel/yama/ptrace_scope. Consider"
-        )
-        print("    upgrading your Pixel C to MXC89L or newer, where Yama is disabled.")
-
-    adb_version = subprocess.check_output(device.adb_cmd + ["version"]).decode()
-    log("ADB command used: '{}'".format(" ".join(device.adb_cmd)))
-    log("ADB version: {}".format(" ".join(adb_version.splitlines())))
-
-    project = find_project(args)
-    if args.package_name:
-        log("Attaching to specified package: {}".format(args.package_name))
-    else:
-        parse_manifest(args)
-
-    pkg_name = args.package_name
-
-    if args.launch is False:
-        log("Attaching to existing application process.")
-    else:
-        args.launch = select_target(args)
-        log("Selected target activity: '{}'".format(args.launch))
-
-    abi = fetch_abi(args)
-    arch = abi_to_arch(abi)
-
-    out_dir = os.path.join(project, (dump_var(args, "TARGET_OUT", abi)))
-    out_dir = os.path.realpath(out_dir)
-
-    app_data_dir = get_app_data_dir(args, pkg_name)
-
-    llvm_toolchain_dir = os.path.join(
-        NDK_PATH, "toolchains", "llvm", "prebuilt", get_llvm_host_name()
-    )
-    if use_lldb:
-        server_local_path = os.path.join(
-            llvm_toolchain_dir,
-            "lib64",
-            "clang",
-            get_llvm_package_version(llvm_toolchain_dir),
-            "lib",
-            "linux",
-            abi_to_llvm_arch(abi),
-            "lldb-server",
-        )
-        server_name = "lldb-server"
-    else:
-        server_local_path = "{}/prebuilt/android-{}/gdbserver/gdbserver"
-        server_local_path = server_local_path.format(NDK_PATH, arch)
-        server_name = "gdbserver"
-    if not os.path.exists(server_local_path):
-        error("Can not find {}: {}".format(server_name, server_local_path))
-    log("Using {}: {}".format(server_name, server_local_path))
-    debugger_server_path = get_debugger_server_path(
-        args, pkg_name, app_data_dir, arch, server_name, server_local_path
-    )
-
-    # Kill the process and gdbserver if requested.
-    if args.force:
-        kill_pids = gdbrunner.get_pids(device, debugger_server_path)
-        if args.launch:
-            kill_pids += gdbrunner.get_pids(device, pkg_name)
-        kill_pids = [str(pid) for pid in kill_pids]
-        if kill_pids:
-            log("Killing processes: {}".format(", ".join(kill_pids)))
-            device.shell_nocheck(["run-as", pkg_name, "kill", "-9"] + kill_pids)
-
-    # Launch the application if needed, and get its pid
-    if args.launch:
-        am_cmd = ["am", "start"]
-        if not args.nowait:
-            am_cmd.append("-D")
-        component_name = "{}/{}".format(pkg_name, args.launch)
-        am_cmd.append(component_name)
-        log("Launching activity {}...".format(component_name))
-        (rc, _, _) = device.shell_nocheck(am_cmd)
-        if rc != 0:
-            error("Failed to start {}".format(component_name))
-
-        if args.delay > 0.0:
-            log("Sleeping for {} seconds.".format(args.delay))
-            time.sleep(args.delay)
-
-    pids = gdbrunner.get_pids(device, pkg_name)
-    if len(pids) == 0:
-        error("Failed to find running process '{}'".format(pkg_name))
-    if len(pids) > 1:
-        error("Multiple running processes named '{}'".format(pkg_name))
-    pid = pids[0]
-
-    # Pull the linker, zygote, and notable system libraries
-    app_64bit = "64" in abi
-    pull_binaries(device, out_dir, app_64bit)
-    if app_64bit:
-        zygote_path = os.path.join(out_dir, "system", "bin", "app_process64")
-    else:
-        zygote_path = os.path.join(out_dir, "system", "bin", "app_process")
-
-    # Start gdbserver.
-    debug_socket = posixpath.join(app_data_dir, "debug_socket")
-    log("Starting {}...".format(server_name))
-    gdbrunner.start_gdbserver(
-        device,
-        None,
-        debugger_server_path,
-        target_pid=pid,
-        run_cmd=None,
-        debug_socket=debug_socket,
-        port=args.port,
-        run_as_cmd=["run-as", pkg_name],
-        lldb=use_lldb,
-    )
-
-    # Start jdb to unblock the application if necessary.
-    jdb_pid = pid if (args.launch and not args.nowait) else None
-
-    # Start gdb.
-    if use_lldb:
-        script_commands = generate_lldb_script(
-            args, out_dir, zygote_path, app_64bit, jdb_pid, llvm_toolchain_dir
-        )
-        debugger_path = get_lldb_path(llvm_toolchain_dir)
-        flags = []
-    else:
-        script_commands = generate_gdb_script(
-            args, out_dir, zygote_path, app_64bit, jdb_pid
-        )
-        debugger_path = os.path.join(ndk_bin_path(), "gdb")
-        flags = ["--tui"] if args.tui else []
-    print(debugger_path)
-    gdbrunner.start_gdb(debugger_path, script_commands, flags, lldb=use_lldb)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/ndkstack.py b/ndkstack.py
deleted file mode 100644
index dcbdc7b..0000000
--- a/ndkstack.py
+++ /dev/null
@@ -1,498 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Symbolizes stack traces from logcat.
-See https://developer.android.com/ndk/guides/ndk-stack for more information.
-"""
-
-from __future__ import annotations
-
-import argparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import zipfile
-from pathlib import Path, PurePosixPath
-from typing import BinaryIO
-
-EXE_SUFFIX = ".exe" if os.name == "nt" else ""
-
-
-class TmpDir:
-    """Manage temporary directory creation."""
-
-    def __init__(self) -> None:
-        self._tmp_dir: Path | None = None
-
-    def delete(self) -> None:
-        if self._tmp_dir:
-            shutil.rmtree(self._tmp_dir)
-
-    def get_directory(self) -> Path:
-        if not self._tmp_dir:
-            self._tmp_dir = Path(tempfile.mkdtemp())
-        return self._tmp_dir
-
-
-def get_ndk_paths() -> tuple[Path, Path, str]:
-    """Parse and find all of the paths of the ndk
-
-    Returns: Three values:
-             Full path to the root of the ndk install.
-             Full path to the ndk bin directory where this executable lives.
-             The platform name (eg linux-x86_64).
-    """
-
-    # ndk-stack is installed as a zipped Python application (created with zipapp). The
-    # behavior of __file__ when Python runs a zip file doesn't appear to be documented,
-    # but experimentally for this case it will be:
-    #
-    #     $NDK/prebuilt/darwin-x86_64/bin/ndkstack.pyz/ndkstack.py
-    #
-    # ndk-stack is installed to $NDK/prebuilt/<platform>/bin, so from
-    # `android-ndk-r18/prebuilt/linux-x86_64/bin/ndk-stack`...
-    # ...get `android-ndk-r18/`:
-    path_in_zipped_app = Path(__file__)
-    zip_root = path_in_zipped_app.parent
-    ndk_bin = zip_root.parent
-    ndk_root = ndk_bin.parent.parent.parent
-    # ...get `linux-x86_64`:
-    ndk_host_tag = ndk_bin.parent.name
-    return ndk_root, ndk_bin, ndk_host_tag
-
-
-def find_llvm_symbolizer(ndk_root: Path, ndk_bin: Path, ndk_host_tag: str) -> Path:
-    """Finds the NDK llvm-symbolizer(1) binary.
-
-    Returns: An absolute path to llvm-symbolizer(1).
-    """
-
-    llvm_symbolizer = "llvm-symbolizer" + EXE_SUFFIX
-    path = (
-        ndk_root / "toolchains/llvm/prebuilt" / ndk_host_tag / "bin" / llvm_symbolizer
-    )
-    if path.exists():
-        return path
-
-    # Okay, maybe we're a standalone toolchain? (https://github.com/android-ndk/ndk/issues/931)
-    # In that case, llvm-symbolizer and ndk-stack are conveniently in
-    # the same directory...
-    if (path := ndk_bin / llvm_symbolizer).exists():
-        return path
-    raise OSError("Unable to find llvm-symbolizer")
-
-
-def find_readelf(ndk_root: Path, ndk_bin: Path, ndk_host_tag: str) -> Path | None:
-    """Finds the NDK readelf(1) binary.
-
-    Returns: An absolute path to readelf(1).
-    """
-
-    readelf = "llvm-readelf" + EXE_SUFFIX
-    m = re.match("^[^-]+-(.*)", ndk_host_tag)
-    if m:
-        # Try as if this is not a standalone install.
-        path = ndk_root / "toolchains/llvm/prebuilt" / ndk_host_tag / "bin" / readelf
-        if path.exists():
-            return path
-
-    # Might be a standalone toolchain.
-    path = ndk_bin / readelf
-    if path.exists():
-        return path
-    return None
-
-
-def get_build_id(readelf_path: Path, elf_file: Path) -> bytes | None:
-    """Get the GNU build id note from an elf file.
-
-    Returns: The build id found or None if there is no build id or the
-             readelf path does not exist.
-    """
-
-    try:
-        output = subprocess.check_output([str(readelf_path), "-n", str(elf_file)])
-        m = re.search(rb"Build ID:\s+([0-9a-f]+)", output)
-        if not m:
-            return None
-        return m.group(1)
-    except subprocess.CalledProcessError:
-        return None
-
-
-def get_zip_info_from_offset(
-    zip_file: zipfile.ZipFile, offset: int
-) -> zipfile.ZipInfo | None:
-    """Get the ZipInfo object from a zip file.
-
-    Returns: A ZipInfo object found at the 'offset' into the zip file.
-             Returns None if no file can be found at the given 'offset'.
-    """
-    assert zip_file.filename is not None
-
-    file_size = os.stat(zip_file.filename).st_size
-    if offset >= file_size:
-        return None
-
-    # The code below requires that the infos are sorted by header_offset,
-    # so sort the infos.
-    infos = sorted(zip_file.infolist(), key=lambda info: info.header_offset)
-    if not infos or offset < infos[0].header_offset:
-        return None
-
-    for i in range(1, len(infos)):
-        prev_info = infos[i - 1]
-        cur_offset = infos[i].header_offset
-        if prev_info.header_offset <= offset < cur_offset:
-            zip_info = prev_info
-            return zip_info
-    zip_info = infos[len(infos) - 1]
-    if offset < zip_info.header_offset:
-        return None
-    return zip_info
-
-
-class FrameInfo:
-    """A class to represent the data in a single backtrace frame.
-
-    Attributes:
-      num: The string representing the frame number (eg #01).
-      pc: The relative program counter for the frame.
-      elf_file: The file or map name in which the relative pc resides.
-      container_file: The name of the file that contains the elf_file.
-                      For example, an entry like GoogleCamera.apk!libsome.so
-                      would set container_file to GoogleCamera.apk and
-                      set elf_file to libsome.so. Set to None if no ! found.
-      offset: The offset into the file at which this library was mapped.
-              Set to None if no offset found.
-      build_id: The Gnu build id note parsed from the frame information.
-                Set to None if no build id found.
-      tail: The part of the line after the program counter.
-    """
-
-    # See unwindstack::FormatFrame in libunwindstack.
-    # We're deliberately very loose because NDK users are likely to be
-    # looking at crashes on ancient OS releases.
-    # TODO: support asan stacks too?
-    #
-    # The PC will begin with 0x for some traces. That's not the norm, but we've had a
-    # report of traces with that format being provided by the Play console. Presumably
-    # either Play is rewriting those (though I can't imagine why they'd be doing that),
-    # or some OEM has altered the format of the crash output.
-    # See https://github.com/android/ndk/issues/1898.
-    _line_re = re.compile(rb".* +(#[0-9]+) +pc (?:0x)?([0-9a-f]+) +(([^ ]+).*)")
-    _sanitizer_line_re = re.compile(
-        rb".* +(#[0-9]+) +0x[0-9a-f]* +\(([^ ]+)\+0x([0-9a-f]+)\)"
-    )
-    _lib_re = re.compile(r"([^\!]+)\!(.+)")
-    _offset_re = re.compile(rb"\(offset\s+(0x[0-9a-f]+)\)")
-    _build_id_re = re.compile(rb"\(BuildId:\s+([0-9a-f]+)\)")
-
-    @classmethod
-    def from_line(cls, line: bytes) -> FrameInfo | None:
-        m = FrameInfo._line_re.match(line)
-        if m:
-            num, pc, tail, elf_file = m.group(1, 2, 3, 4)
-            # The path in the trace file comes from a POSIX system, so it can
-            # contain arbitrary bytes that are not valid UTF-8. If the user is
-            # on Windows it's impossible for us to handle those paths. This is
-            # an extremely unlikely circumstance. In any case, the fix on the
-            # user's side is "don't do that", so just attempt to decode UTF-8
-            # and let the exception be thrown if it isn't.
-            return cls(num, pc, tail, PurePosixPath(elf_file.decode("utf-8")))
-        m = FrameInfo._sanitizer_line_re.match(line)
-        if m:
-            num, pc, tail, elf_file = m.group(1, 3, 2, 2)
-            return cls(
-                num, pc, tail, PurePosixPath(elf_file.decode("utf-8")), sanitizer=True
-            )
-        return None
-
-    def __init__(
-        self,
-        num: bytes,
-        pc: bytes,
-        tail: bytes,
-        elf_file: PurePosixPath,
-        sanitizer: bool = False,
-    ) -> None:
-        self.num = num
-        self.pc = pc
-        self.tail = tail
-        self.elf_file = elf_file
-        self.sanitizer = sanitizer
-
-        if (library_match := FrameInfo._lib_re.match(str(self.elf_file))) is not None:
-            self.container_file: PurePosixPath | None = PurePosixPath(
-                library_match.group(1)
-            )
-            self.elf_file = PurePosixPath(library_match.group(2))
-            # Sometimes an entry like this will occur:
-            #   #01 pc 0000abcd  /system/lib/lib/libc.so!libc.so (offset 0x1000)
-            # In this case, no container file should be set.
-            if os.path.basename(self.container_file) == os.path.basename(self.elf_file):
-                self.elf_file = self.container_file
-                self.container_file = None
-        else:
-            self.container_file = None
-        m = FrameInfo._offset_re.search(self.tail)
-        if m:
-            self.offset: int | None = int(m.group(1), 16)
-        else:
-            self.offset = None
-        m = FrameInfo._build_id_re.search(self.tail)
-        if m:
-            self.build_id = m.group(1)
-        else:
-            self.build_id = None
-
-    def verify_elf_file(
-        self, readelf_path: Path | None, elf_file_path: Path, display_elf_path: str
-    ) -> bool:
-        """Verify if the elf file is valid.
-
-        Returns: True if the elf file exists and build id matches (if it exists).
-        """
-
-        if not os.path.exists(elf_file_path):
-            return False
-        if readelf_path and self.build_id:
-            build_id = get_build_id(readelf_path, elf_file_path)
-            if build_id is None:
-                print(
-                    f"ERROR: Could not determine build ID for {elf_file_path}",
-                    flush=True,
-                )
-                return False
-            if self.build_id != build_id:
-                print(
-                    "WARNING: Mismatched build id for %s" % (display_elf_path),
-                    flush=True,
-                )
-                print(
-                    "WARNING:   Expected %s" % (self.build_id.decode("utf-8")),
-                    flush=True,
-                )
-                print("WARNING:   Found    %s" % (build_id.decode("utf-8")), flush=True)
-                return False
-        return True
-
-    def get_elf_file(
-        self, symbol_dir: Path, readelf_path: Path | None, tmp_dir: TmpDir
-    ) -> Path | None:
-        """Get the path to the elf file represented by this frame.
-
-        Returns: The path to the elf file if it is valid, or None if
-                 no valid elf file can be found. If the file has to be
-                 extracted from an apk, the elf file will be placed in
-                 tmp_dir.
-        """
-
-        elf_file = self.elf_file.name
-        if self.container_file:
-            # This matches a file format such as Base.apk!libsomething.so
-            # so see if we can find libsomething.so in the symbol directory.
-            elf_file_path = symbol_dir / elf_file
-            if self.verify_elf_file(readelf_path, elf_file_path, str(elf_file_path)):
-                return elf_file_path
-
-            apk_file_path = symbol_dir / self.container_file.name
-            with zipfile.ZipFile(apk_file_path) as zip_file:
-                assert self.offset is not None
-                zip_info = get_zip_info_from_offset(zip_file, self.offset)
-                if not zip_info:
-                    return None
-                elf_file_path = Path(
-                    zip_file.extract(zip_info, tmp_dir.get_directory())
-                )
-                display_elf_file = "%s!%s" % (apk_file_path, elf_file)
-                if not self.verify_elf_file(
-                    readelf_path, elf_file_path, display_elf_file
-                ):
-                    return None
-                return elf_file_path
-        elif self.elf_file.suffix == ".apk":
-            # This matches a stack line such as:
-            #   #08 pc 00cbed9c  GoogleCamera.apk (offset 0x6e32000)
-            apk_file_path = symbol_dir / elf_file
-            with zipfile.ZipFile(apk_file_path) as zip_file:
-                assert self.offset is not None
-                zip_info = get_zip_info_from_offset(zip_file, self.offset)
-                if not zip_info:
-                    return None
-
-                # Rewrite the output tail so that it goes from:
-                #   GoogleCamera.apk ...
-                # To:
-                #   GoogleCamera.apk!libsomething.so ...
-                index = self.tail.find(elf_file.encode("utf-8"))
-                if index != -1:
-                    index += len(elf_file)
-                    self.tail = (
-                        self.tail[0:index]
-                        + b"!"
-                        + bytes(zip_info.filename, encoding="utf-8")
-                        + self.tail[index:]
-                    )
-                elf_file = os.path.basename(zip_info.filename)
-                elf_file_path = symbol_dir / elf_file
-                if self.verify_elf_file(
-                    readelf_path, elf_file_path, str(elf_file_path)
-                ):
-                    return elf_file_path
-
-                elf_file_path = Path(
-                    zip_file.extract(zip_info, tmp_dir.get_directory())
-                )
-                display_elf_path = "%s!%s" % (apk_file_path, elf_file)
-                if not self.verify_elf_file(
-                    readelf_path, elf_file_path, display_elf_path
-                ):
-                    return None
-                return elf_file_path
-        elf_file_path = symbol_dir / elf_file
-        if self.verify_elf_file(readelf_path, elf_file_path, str(elf_file_path)):
-            return elf_file_path
-        return None
-
-
-def symbolize_trace(trace_input: BinaryIO, symbol_dir: Path) -> None:
-    ndk_paths = get_ndk_paths()
-    symbolize_cmd = [
-        str(find_llvm_symbolizer(*ndk_paths)),
-        "--demangle",
-        "--functions=linkage",
-        "--inlines",
-    ]
-    readelf_path = find_readelf(*ndk_paths)
-
-    symbolize_proc = None
-
-    try:
-        tmp_dir = TmpDir()
-        symbolize_proc = subprocess.Popen(
-            symbolize_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE
-        )
-        assert symbolize_proc.stdin is not None
-        assert symbolize_proc.stdout is not None
-        banner = b"*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***"
-        in_crash = False
-        saw_frame = False
-        for line in trace_input:
-            line = line.rstrip()
-
-            if not in_crash:
-                if banner in line:
-                    in_crash = True
-                    saw_frame = False
-                    print("********** Crash dump: **********", flush=True)
-                continue
-
-            for tag in [b"Build fingerprint:", b"Abort message:"]:
-                if tag in line:
-                    sys.stdout.buffer.write(line[line.find(tag) :])
-                    print(flush=True)
-                    continue
-
-            frame_info = FrameInfo.from_line(line)
-            if not frame_info:
-                if saw_frame:
-                    in_crash = False
-                    print("Crash dump is completed\n", flush=True)
-                continue
-
-            # There can be a gap between sanitizer frames in the abort message
-            # and the actual backtrace. Do not end the crash dump until we've
-            # seen the actual backtrace.
-            if not frame_info.sanitizer:
-                saw_frame = True
-
-            try:
-                elf_file = frame_info.get_elf_file(symbol_dir, readelf_path, tmp_dir)
-            except IOError:
-                elf_file = None
-
-            # Print a slightly different version of the stack trace line.
-            # The original format:
-            #      #00 pc 0007b350  /lib/bionic/libc.so (__strchr_chk+4)
-            # becomes:
-            #      #00 0x0007b350 /lib/bionic/libc.so (__strchr_chk+4)
-            out_line = b"%s 0x%s %s\n" % (
-                frame_info.num,
-                frame_info.pc,
-                frame_info.tail,
-            )
-            sys.stdout.buffer.write(out_line)
-            indent = (out_line.find(b"(") + 1) * b" "
-            if not elf_file:
-                continue
-            value = b'"%s" 0x%s\n' % (elf_file, frame_info.pc)
-            symbolize_proc.stdin.write(value)
-            symbolize_proc.stdin.flush()
-            while True:
-                symbolizer_output = symbolize_proc.stdout.readline().rstrip()
-                if not symbolizer_output:
-                    break
-                # TODO: rewrite file names base on a source path?
-                sys.stdout.buffer.write(b"%s%s\n" % (indent, symbolizer_output))
-    finally:
-        trace_input.close()
-        tmp_dir.delete()
-        if symbolize_proc:
-            assert symbolize_proc.stdin is not None
-            assert symbolize_proc.stdout is not None
-            symbolize_proc.stdin.close()
-            symbolize_proc.stdout.close()
-            symbolize_proc.kill()
-            symbolize_proc.wait()
-
-
-def main(argv: list[str] | None = None) -> None:
-    """ "Program entry point."""
-    parser = argparse.ArgumentParser(
-        description="Symbolizes Android crashes.",
-        epilog="See <https://developer.android.com/ndk/guides/ndk-stack>.",
-    )
-    parser.add_argument(
-        "-sym",
-        "--sym",
-        dest="symbol_dir",
-        type=Path,
-        required=True,  # TODO: default to '.'?
-        help="directory containing unstripped .so files",
-    )
-    parser.add_argument(
-        "-i",
-        "-dump",
-        "--dump",
-        dest="input",
-        default=sys.stdin.buffer,
-        type=argparse.FileType("rb"),
-        help="input filename",
-    )
-    args = parser.parse_args(argv)
-
-    if not os.path.exists(args.symbol_dir):
-        sys.exit("{} does not exist!\n".format(args.symbol_dir))
-
-    symbolize_trace(args.input, args.symbol_dir)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/parse_elfnote.py b/parse_elfnote.py
index 6be45c0..74a61fb 100755
--- a/parse_elfnote.py
+++ b/parse_elfnote.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2016 The Android Open Source Project
 #
@@ -26,16 +26,14 @@
 #
 
 from __future__ import division, print_function
-
 import argparse
 import logging
-import shutil
 import struct
 import subprocess
 import sys
-from pathlib import Path
 
-SEC_NAME = ".note.android.ident"
+
+SEC_NAME = '.note.android.ident'
 NDK_RESERVED_SIZE = 64
 
 
@@ -50,7 +48,7 @@
     return (val + (step - 1)) // step * step
 
 
-class StructParser:
+class StructParser(object):
     def __init__(self, buf):
         self.buf = buf
         self.pos = 0
@@ -64,60 +62,59 @@
         return self.remaining == 0
 
     def read(self, read_len):
-        buf = self.buf[self.pos : read_len + self.pos]
+        buf = self.buf[self.pos:read_len + self.pos]
         self.pos += read_len
         return buf
 
     def read_struct(self, fmt, kind):
         fmt = struct.Struct(fmt)
         if self.remaining < fmt.size:
-            sys.exit("error: {} was truncated".format(kind))
+            sys.exit('error: {} was truncated'.format(kind))
         return fmt.unpack(self.read(fmt.size))
 
 
 def iterate_notes(sec_data):
     sec_data = StructParser(sec_data)
     while not sec_data.empty:
-        (namesz, descsz, kind) = sec_data.read_struct("<III", "note header")
+        (namesz, descsz, kind) = sec_data.read_struct('<III', 'note header')
         (name, desc) = sec_data.read_struct(
-            "{}s{}s".format(
-                round_up_to_nearest(namesz, 4), round_up_to_nearest(descsz, 4)
-            ),
-            "note body",
-        )
+            '{}s{}s'.format(
+                round_up_to_nearest(namesz, 4),
+                round_up_to_nearest(descsz, 4)),
+            'note body')
         name = name[:namesz]
         if len(name) > 0:
-            if name[-1:] == b"\0":
+            if name[-1:] == b'\0':
                 name = name[:-1]
             else:
-                logger().warning("note name %s isn't NUL-terminated", name)
+                logger().warning('note name %s isn\'t NUL-terminated', name)
         yield name, kind, desc[:descsz]
 
 
 def dump_android_ident_note(note):
     note = StructParser(note)
-    (android_api,) = note.read_struct("<I", "note descriptor")
-    print("ABI_ANDROID_API: {}".format(android_api))
+    (android_api,) = note.read_struct('<I', 'note descriptor')
+    print('ABI_ANDROID_API: {}'.format(android_api))
     if note.empty:
         return
     # Binaries generated by NDK r14 and later have these extra fields. Platform
     # binaries and binaries generated by older NDKs don't.
     ndk_version, ndk_build_number = note.read_struct(
-        "{sz}s{sz}s".format(sz=NDK_RESERVED_SIZE), "note descriptor"
-    )
-    ndk_version = ndk_version.decode("utf-8")
-    ndk_build_number = ndk_build_number.decode("utf-8")
-    print("ABI_NDK_VERSION: {}".format(ndk_version.rstrip("\0")))
-    print("ABI_NDK_BUILD_NUMBER: {}".format(ndk_build_number.rstrip("\0")))
+        '{}s{}s'.format(NDK_RESERVED_SIZE, NDK_RESERVED_SIZE),
+        'note descriptor')
+    ndk_version = ndk_version.decode('utf-8')
+    ndk_build_number = ndk_build_number.decode('utf-8')
+    print('ABI_NDK_VERSION: {}'.format(ndk_version.rstrip('\0')))
+    print('ABI_NDK_BUILD_NUMBER: {}'.format(ndk_build_number.rstrip('\0')))
     if not note.empty:
-        logger().warning("excess data at end of descriptor")
+        logger().warning('excess data at end of descriptor')
 
 
 # Get the offset to a section from the output of readelf
-def get_section_pos(readelf: Path, sec_name: str, file_path: str) -> tuple[int, int]:
-    cmd = [readelf, "--sections", "-W", file_path]
+def get_section_pos(sec_name, file_path):
+    cmd = ['readelf', '--sections', '-W', file_path]
     output = subprocess.check_output(cmd)
-    lines = output.decode("utf-8").splitlines()
+    lines = output.decode('utf-8').splitlines()
     for line in lines:
         logger().debug('Checking line for "%s": %s', sec_name, line)
         # Looking for a line like the following (all whitespace of unknown
@@ -128,9 +125,9 @@
         # The only column that might have internal whitespace is the first one.
         # Since we don't care about it, remove the head of the string until the
         # closing bracket, then split.
-        if "]" not in line:
+        if ']' not in line:
             continue
-        line = line[line.index("]") + 1 :]
+        line = line[line.index(']') + 1:]
 
         sections = line.split()
         if len(sections) < 5 or sec_name != sections[0]:
@@ -138,71 +135,17 @@
         off = int(sections[3], 16)
         size = int(sections[4], 16)
         return (off, size)
-    sys.exit("error: failed to find section: {}".format(sec_name))
-
-
-def get_ndk_install_path() -> Path | None:
-    try:
-        import ndk.paths  # pylint: disable=import-outside-toplevel
-
-        path = ndk.paths.get_install_path()
-        if path.exists():
-            return path
-        return None
-    except ImportError:
-        return None
-
-
-def readelf_from_ndk(ndk: Path) -> Path:
-    if not ndk.exists():
-        raise ValueError(f"--ndk is {ndk} but that path does not exist")
-    prebuilt_dir = ndk / "toolchains/llvm/prebuilt"
-    bins = list(prebuilt_dir.glob("*/bin"))
-    if not bins:
-        raise RuntimeError(f"{prebuilt_dir} contains no */bin")
-    if len(bins) != 1:
-        raise RuntimeError(f"{prebuilt_dir} contains more than one */bin")
-    bin_dir = bins[0]
-
-    readelf = (bin_dir / "llvm-readelf").with_suffix(
-        ".exe" if sys.platform == "win32" else ""
-    )
-    if not readelf.exists():
-        raise RuntimeError(f"{readelf} does not exist")
-    return readelf
-
-
-def find_readelf(ndk: Path | None) -> Path:
-    if ndk is not None:
-        return readelf_from_ndk(ndk)
-    if (install_path := get_ndk_install_path()) is not None:
-        return readelf_from_ndk(install_path)
-    if (readelf := shutil.which("llvm-readelf")) is not None:
-        return Path(readelf)
-    if (readelf := shutil.which("readelf")) is not None:
-        return Path(readelf)
-    raise RuntimeError(
-        "Could not find llvm-readelf or readelf in PATH and could find find any NDK"
-    )
+    sys.exit('error: failed to find section: {}'.format(sec_name))
 
 
 def parse_args():
     """Parses command line arguments."""
     parser = argparse.ArgumentParser()
-    parser.add_argument("file_path", help="path of the ELF file with embedded ABI tags")
+    parser.add_argument('file_path',
+                        help="path of the ELF file with embedded ABI tags")
     parser.add_argument(
-        "-v",
-        "--verbose",
-        dest="verbosity",
-        action="count",
-        default=0,
-        help="Increase logging verbosity.",
-    )
-    parser.add_argument(
-        "--ndk",
-        type=Path,
-        help="Path to the NDK. If given, the NDK's llvm-readelf will be used.",
-    )
+        '-v', '--verbose', dest='verbosity', action='count', default=0,
+        help='Increase logging verbosity.')
     return parser.parse_args()
 
 
@@ -217,27 +160,24 @@
 
     file_path = args.file_path
 
-    readelf = find_readelf(args.ndk)
-
     with open(file_path, "rb") as obj_file:
-        (sec_off, sec_size) = get_section_pos(readelf, SEC_NAME, file_path)
+        (sec_off, sec_size) = get_section_pos(SEC_NAME, file_path)
 
         obj_file.seek(sec_off)
         sec_data = obj_file.read(sec_size)
         if len(sec_data) != sec_size:
-            sys.exit("error: could not read {} section".format(SEC_NAME))
+            sys.exit('error: could not read {} section'.format(SEC_NAME))
 
-        print("----------ABI INFO----------")
+        print('----------ABI INFO----------')
         if len(sec_data) == 0:
-            logger().warning("%s section is empty", SEC_NAME)
-        for name, kind, desc in iterate_notes(sec_data):
-            if (name, kind) == (b"Android", 1):
+            logger().warning('%s section is empty', SEC_NAME)
+        for (name, kind, desc) in iterate_notes(sec_data):
+            if (name, kind) == (b'Android', 1):
                 dump_android_ident_note(desc)
             else:
-                logger().warning(
-                    "unrecognized note (name %s, type %d)", repr(name), kind
-                )
+                logger().warning('unrecognized note (name %s, type %d)',
+                                 repr(name), kind)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index a3fe4d3..0000000
--- a/poetry.lock
+++ /dev/null
@@ -1,896 +0,0 @@
-# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
-
-[[package]]
-name = "adb"
-version = "0.0.1"
-description = "A Python interface to the Android Debug Bridge."
-category = "dev"
-optional = false
-python-versions = "*"
-files = []
-develop = false
-
-[package.source]
-type = "directory"
-url = "../development/python-packages/adb"
-
-[[package]]
-name = "aiohttp"
-version = "3.9.3"
-description = "Async http client/server framework (asyncio)"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"},
-    {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"},
-    {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"},
-    {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"},
-    {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"},
-    {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"},
-    {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"},
-    {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"},
-    {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"},
-    {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"},
-    {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"},
-    {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"},
-    {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"},
-    {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"},
-    {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"},
-    {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"},
-    {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"},
-    {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"},
-    {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"},
-    {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"},
-    {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"},
-    {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"},
-    {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"},
-    {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"},
-    {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"},
-    {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"},
-    {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"},
-    {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"},
-    {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"},
-    {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"},
-    {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"},
-    {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"},
-    {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"},
-    {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"},
-    {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"},
-    {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"},
-    {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"},
-    {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"},
-    {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"},
-    {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"},
-    {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"},
-    {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"},
-    {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"},
-    {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"},
-    {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"},
-    {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"},
-    {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"},
-    {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"},
-    {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"},
-    {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"},
-    {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"},
-    {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"},
-    {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"},
-    {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"},
-    {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"},
-    {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"},
-    {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"},
-    {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"},
-    {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"},
-    {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"},
-    {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"},
-    {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"},
-    {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"},
-    {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"},
-    {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"},
-    {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"},
-    {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"},
-    {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"},
-    {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"},
-    {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"},
-    {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"},
-    {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"},
-    {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"},
-    {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"},
-    {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"},
-    {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"},
-]
-
-[package.dependencies]
-aiosignal = ">=1.1.2"
-attrs = ">=17.3.0"
-frozenlist = ">=1.1.1"
-multidict = ">=4.5,<7.0"
-yarl = ">=1.0,<2.0"
-
-[package.extras]
-speedups = ["Brotli", "aiodns", "brotlicffi"]
-
-[[package]]
-name = "aiosignal"
-version = "1.3.1"
-description = "aiosignal: a list of registered asynchronous callbacks"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"},
-    {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"},
-]
-
-[package.dependencies]
-frozenlist = ">=1.1.0"
-
-[[package]]
-name = "astroid"
-version = "3.1.0"
-description = "An abstract syntax tree for Python with inference support."
-category = "dev"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "astroid-3.1.0-py3-none-any.whl", hash = "sha256:951798f922990137ac090c53af473db7ab4e70c770e6d7fae0cec59f74411819"},
-    {file = "astroid-3.1.0.tar.gz", hash = "sha256:ac248253bfa4bd924a0de213707e7ebeeb3138abeb48d798784ead1e56d419d4"},
-]
-
-[[package]]
-name = "attrs"
-version = "23.2.0"
-description = "Classes Without Boilerplate"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
-    {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
-]
-
-[package.extras]
-cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[tests]", "pre-commit"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
-tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
-tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
-
-[[package]]
-name = "black"
-version = "24.3.0"
-description = "The uncompromising code formatter."
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"},
-    {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"},
-    {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"},
-    {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"},
-    {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"},
-    {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"},
-    {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"},
-    {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"},
-    {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"},
-    {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"},
-    {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"},
-    {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"},
-    {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"},
-    {file = "black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"},
-    {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"},
-    {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"},
-    {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"},
-    {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"},
-    {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"},
-    {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"},
-    {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"},
-    {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-packaging = ">=22.0"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
-[[package]]
-name = "click"
-version = "8.1.7"
-description = "Composable command line interface toolkit"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
-    {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "platform_system == \"Windows\""}
-
-[[package]]
-name = "colorama"
-version = "0.4.6"
-description = "Cross-platform colored terminal text."
-category = "dev"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
-files = [
-    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
-    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
-]
-
-[[package]]
-name = "coverage"
-version = "7.4.4"
-description = "Code coverage measurement for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"},
-    {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"},
-    {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"},
-    {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"},
-    {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"},
-    {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"},
-    {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"},
-    {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"},
-    {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"},
-    {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"},
-    {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"},
-    {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"},
-    {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"},
-    {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"},
-    {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"},
-    {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"},
-    {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"},
-    {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"},
-    {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"},
-    {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"},
-    {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"},
-    {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"},
-    {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"},
-    {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"},
-    {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"},
-    {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"},
-    {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"},
-    {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"},
-    {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"},
-    {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"},
-    {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"},
-    {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"},
-    {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"},
-    {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"},
-    {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"},
-    {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"},
-    {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"},
-    {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"},
-    {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"},
-    {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"},
-    {file = "coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"},
-    {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"},
-    {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"},
-    {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"},
-    {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"},
-    {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"},
-    {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"},
-    {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"},
-    {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"},
-    {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"},
-    {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"},
-    {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"},
-]
-
-[package.extras]
-toml = ["tomli"]
-
-[[package]]
-name = "dill"
-version = "0.3.8"
-description = "serialize all of Python"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"},
-    {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"},
-]
-
-[package.extras]
-graph = ["objgraph (>=1.7.2)"]
-profile = ["gprof2dot (>=2022.7.29)"]
-
-[[package]]
-name = "fetchartifact"
-version = "0.1.0"
-description = "Python library for https://android.googlesource.com/tools/fetch_artifact/."
-category = "dev"
-optional = false
-python-versions = "^3.10"
-files = []
-develop = true
-
-[package.dependencies]
-aiohttp = "^3.8.4"
-
-[package.source]
-type = "directory"
-url = "../development/python-packages/fetchartifact"
-
-[[package]]
-name = "frozenlist"
-version = "1.4.1"
-description = "A list-like structure which implements collections.abc.MutableSequence"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
-    {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
-    {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"},
-    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"},
-    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"},
-    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"},
-    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"},
-    {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"},
-    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"},
-    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"},
-    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"},
-    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"},
-    {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"},
-    {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"},
-    {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"},
-    {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"},
-    {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"},
-    {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"},
-    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"},
-    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"},
-    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"},
-    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"},
-    {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"},
-    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"},
-    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"},
-    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"},
-    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"},
-    {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"},
-    {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"},
-    {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"},
-    {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"},
-    {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"},
-    {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"},
-    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"},
-    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"},
-    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"},
-    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"},
-    {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"},
-    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"},
-    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"},
-    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"},
-    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"},
-    {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"},
-    {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"},
-    {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"},
-    {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"},
-    {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"},
-    {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"},
-    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"},
-    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"},
-    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"},
-    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"},
-    {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"},
-    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"},
-    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"},
-    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"},
-    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"},
-    {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"},
-    {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"},
-    {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"},
-    {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"},
-    {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"},
-    {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"},
-    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"},
-    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"},
-    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"},
-    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"},
-    {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"},
-    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"},
-    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"},
-    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"},
-    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"},
-    {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"},
-    {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"},
-    {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"},
-    {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"},
-    {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
-]
-
-[[package]]
-name = "gdbrunner"
-version = "0.0.1"
-description = "Common helpers of ndk-gdb and gdbclient."
-category = "dev"
-optional = false
-python-versions = "*"
-files = []
-develop = false
-
-[package.source]
-type = "directory"
-url = "../development/python-packages/gdbrunner"
-
-[[package]]
-name = "idna"
-version = "3.6"
-description = "Internationalized Domain Names in Applications (IDNA)"
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
-    {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
-]
-
-[[package]]
-name = "iniconfig"
-version = "2.0.0"
-description = "brain-dead simple config-ini parsing"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
-    {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
-]
-
-[[package]]
-name = "isort"
-version = "5.13.2"
-description = "A Python utility / library to sort Python imports."
-category = "dev"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"},
-    {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"},
-]
-
-[package.extras]
-colors = ["colorama (>=0.4.6)"]
-
-[[package]]
-name = "mccabe"
-version = "0.7.0"
-description = "McCabe checker, plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
-    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
-]
-
-[[package]]
-name = "multidict"
-version = "6.0.5"
-description = "multidict implementation"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
-    {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
-    {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
-    {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
-    {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
-    {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
-    {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
-    {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
-    {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
-    {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
-    {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
-    {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
-    {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
-    {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
-    {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
-    {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
-    {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
-    {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
-    {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
-    {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
-    {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
-    {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
-    {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
-    {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
-    {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
-    {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
-    {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
-    {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
-    {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
-    {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
-]
-
-[[package]]
-name = "mypy"
-version = "1.9.0"
-description = "Optional static typing for Python"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "mypy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8a67616990062232ee4c3952f41c779afac41405806042a8126fe96e098419f"},
-    {file = "mypy-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d357423fa57a489e8c47b7c85dfb96698caba13d66e086b412298a1a0ea3b0ed"},
-    {file = "mypy-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49c87c15aed320de9b438ae7b00c1ac91cd393c1b854c2ce538e2a72d55df150"},
-    {file = "mypy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:48533cdd345c3c2e5ef48ba3b0d3880b257b423e7995dada04248725c6f77374"},
-    {file = "mypy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:4d3dbd346cfec7cb98e6cbb6e0f3c23618af826316188d587d1c1bc34f0ede03"},
-    {file = "mypy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:653265f9a2784db65bfca694d1edd23093ce49740b2244cde583aeb134c008f3"},
-    {file = "mypy-1.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a3c007ff3ee90f69cf0a15cbcdf0995749569b86b6d2f327af01fd1b8aee9dc"},
-    {file = "mypy-1.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2418488264eb41f69cc64a69a745fad4a8f86649af4b1041a4c64ee61fc61129"},
-    {file = "mypy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:68edad3dc7d70f2f17ae4c6c1b9471a56138ca22722487eebacfd1eb5321d612"},
-    {file = "mypy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:85ca5fcc24f0b4aeedc1d02f93707bccc04733f21d41c88334c5482219b1ccb3"},
-    {file = "mypy-1.9.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aceb1db093b04db5cd390821464504111b8ec3e351eb85afd1433490163d60cd"},
-    {file = "mypy-1.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0235391f1c6f6ce487b23b9dbd1327b4ec33bb93934aa986efe8a9563d9349e6"},
-    {file = "mypy-1.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4d5ddc13421ba3e2e082a6c2d74c2ddb3979c39b582dacd53dd5d9431237185"},
-    {file = "mypy-1.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:190da1ee69b427d7efa8aa0d5e5ccd67a4fb04038c380237a0d96829cb157913"},
-    {file = "mypy-1.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:fe28657de3bfec596bbeef01cb219833ad9d38dd5393fc649f4b366840baefe6"},
-    {file = "mypy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e54396d70be04b34f31d2edf3362c1edd023246c82f1730bbf8768c28db5361b"},
-    {file = "mypy-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5e6061f44f2313b94f920e91b204ec600982961e07a17e0f6cd83371cb23f5c2"},
-    {file = "mypy-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a10926e5473c5fc3da8abb04119a1f5811a236dc3a38d92015cb1e6ba4cb9e"},
-    {file = "mypy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b685154e22e4e9199fc95f298661deea28aaede5ae16ccc8cbb1045e716b3e04"},
-    {file = "mypy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d741d3fc7c4da608764073089e5f58ef6352bedc223ff58f2f038c2c4698a89"},
-    {file = "mypy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587ce887f75dd9700252a3abbc9c97bbe165a4a630597845c61279cf32dfbf02"},
-    {file = "mypy-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f88566144752999351725ac623471661c9d1cd8caa0134ff98cceeea181789f4"},
-    {file = "mypy-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61758fabd58ce4b0720ae1e2fea5cfd4431591d6d590b197775329264f86311d"},
-    {file = "mypy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e49499be624dead83927e70c756970a0bc8240e9f769389cdf5714b0784ca6bf"},
-    {file = "mypy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:571741dc4194b4f82d344b15e8837e8c5fcc462d66d076748142327626a1b6e9"},
-    {file = "mypy-1.9.0-py3-none-any.whl", hash = "sha256:a260627a570559181a9ea5de61ac6297aa5af202f06fd7ab093ce74e7181e43e"},
-    {file = "mypy-1.9.0.tar.gz", hash = "sha256:3cc5da0127e6a478cddd906068496a97a7618a21ce9b54bde5bf7e539c7af974"},
-]
-
-[package.dependencies]
-mypy-extensions = ">=1.0.0"
-typing-extensions = ">=4.1.0"
-
-[package.extras]
-dmypy = ["psutil (>=4.0)"]
-install-types = ["pip"]
-mypyc = ["setuptools (>=50)"]
-reports = ["lxml"]
-
-[[package]]
-name = "mypy-extensions"
-version = "1.0.0"
-description = "Type system extensions for programs checked with the mypy type checker."
-category = "dev"
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
-    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
-]
-
-[[package]]
-name = "packaging"
-version = "24.0"
-description = "Core utilities for Python packages"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"},
-    {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"},
-]
-
-[[package]]
-name = "pathspec"
-version = "0.12.1"
-description = "Utility library for gitignore style pattern matching of file paths."
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
-    {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
-]
-
-[[package]]
-name = "platformdirs"
-version = "4.2.0"
-description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"},
-    {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"},
-]
-
-[package.extras]
-docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
-
-[[package]]
-name = "pluggy"
-version = "1.4.0"
-description = "plugin and hook calling mechanisms for python"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
-    {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
-]
-
-[package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
-
-[[package]]
-name = "pylint"
-version = "3.1.0"
-description = "python code static checker"
-category = "dev"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "pylint-3.1.0-py3-none-any.whl", hash = "sha256:507a5b60953874766d8a366e8e8c7af63e058b26345cfcb5f91f89d987fd6b74"},
-    {file = "pylint-3.1.0.tar.gz", hash = "sha256:6a69beb4a6f63debebaab0a3477ecd0f559aa726af4954fc948c51f7a2549e23"},
-]
-
-[package.dependencies]
-astroid = ">=3.1.0,<=3.2.0-dev0"
-colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
-dill = [
-    {version = ">=0.3.6", markers = "python_version >= \"3.11\""},
-    {version = ">=0.3.7", markers = "python_version >= \"3.12\""},
-]
-isort = ">=4.2.5,<5.13.0 || >5.13.0,<6"
-mccabe = ">=0.6,<0.8"
-platformdirs = ">=2.2.0"
-tomlkit = ">=0.10.1"
-
-[package.extras]
-spelling = ["pyenchant (>=3.2,<4.0)"]
-testutils = ["gitpython (>3)"]
-
-[[package]]
-name = "pytest"
-version = "8.1.1"
-description = "pytest: simple powerful testing with Python"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"},
-    {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"},
-]
-
-[package.dependencies]
-colorama = {version = "*", markers = "sys_platform == \"win32\""}
-iniconfig = "*"
-packaging = "*"
-pluggy = ">=1.4,<2.0"
-
-[package.extras]
-testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
-
-[[package]]
-name = "pytest-cov"
-version = "4.1.0"
-description = "Pytest plugin for measuring coverage."
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
-    {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
-]
-
-[package.dependencies]
-coverage = {version = ">=5.2.1", extras = ["toml"]}
-pytest = ">=4.6"
-
-[package.extras]
-testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
-
-[[package]]
-name = "tomlkit"
-version = "0.12.4"
-description = "Style preserving TOML library"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "tomlkit-0.12.4-py3-none-any.whl", hash = "sha256:5cd82d48a3dd89dee1f9d64420aa20ae65cfbd00668d6f094d7578a78efbb77b"},
-    {file = "tomlkit-0.12.4.tar.gz", hash = "sha256:7ca1cfc12232806517a8515047ba66a19369e71edf2439d0f5824f91032b6cc3"},
-]
-
-[[package]]
-name = "typing-extensions"
-version = "4.10.0"
-description = "Backported and Experimental Type Hints for Python 3.8+"
-category = "dev"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"},
-    {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"},
-]
-
-[[package]]
-name = "yarl"
-version = "1.9.4"
-description = "Yet another URL library"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
-    {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
-    {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
-    {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
-    {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
-    {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
-    {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
-    {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
-    {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
-    {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
-    {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
-    {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
-    {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
-    {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
-    {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
-    {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
-    {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
-    {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
-    {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
-    {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
-    {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
-    {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
-    {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
-    {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
-    {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
-    {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
-    {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
-    {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
-    {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
-    {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
-    {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
-    {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
-    {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
-    {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
-    {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
-    {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
-    {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
-    {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
-    {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
-    {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
-    {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
-    {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
-]
-
-[package.dependencies]
-idna = ">=2.0"
-multidict = ">=4.0"
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.11"
-content-hash = "1f309cb49dfa595245acea78fc7289db3ffb572fcec586a7980ef6fbfeb84b0e"
diff --git a/pylintrc b/pylintrc
new file mode 100644
index 0000000..6479b3c
--- /dev/null
+++ b/pylintrc
@@ -0,0 +1,12 @@
+[MESSAGES CONTROL]
+# Some of these should be cleaned up, but disable them for now so I can check
+# this in. The too-many-* refactoring warnings will probably remain on for all
+# time, but naming and docstrings can and should be fixed.
+disable=missing-docstring,invalid-name,fixme,design,locally-disabled,too-many-lines,no-else-return,no-self-use,assignment-from-none
+
+
+[BASIC]
+good-names=i,j,k,ex,Run,_
+
+[VARIABLES]
+dummy-variables-rgx=_|dummy
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index c975a1f..0000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,100 +0,0 @@
-[tool.poetry]
-name = "ndk"
-version = "0.1.0"
-description = "Android NDK build and test environment."
-authors = ["The Android Open Source Project"]
-license = "Apache-2.0"
-
-[tool.poetry.scripts]
-update-prebuilt-ndk = "ndk.tools.ndkgitprebuilts:App.main"
-
-[tool.poetry.dependencies]
-python = "^3.11"
-
-[tool.poetry.group.dev.dependencies]
-click = "^8.1.3"
-fetchartifact = { path = "../development/python-packages/fetchartifact", develop = true }
-aiohttp = "^3.8.4"
-mypy = "^1.9.0"
-pylint = "^3.1.0"
-pytest = "^8.1.1"
-black = "^24.2.0"
-isort = "^5.13.2"
-gdbrunner = { path = "../development/python-packages/gdbrunner" }
-adb = { path = "../development/python-packages/adb" }
-pytest-cov = "^4.1.0"
-
-[tool.black]
-# The default includes "build", which excludes both build/ and tests/build.
-exclude = """
-/(
-  |.git
-  |.venv
-  |venv
-  |.mypy_cache
-)/
-"""
-
-[tool.isort]
-profile = "black"
-
-[tool.mypy]
-check_untyped_defs = true
-# TODO: Investigate fixing type signatures for mocks.
-# disallow_any_decorated = True
-# This flags a *lot* of things since things like json.load return Any.
-# disallow_any_expr = True
-disallow_any_generics = true
-disallow_untyped_decorators = true
-disallow_untyped_defs = true
-follow_imports = "silent"
-implicit_reexport = false
-namespace_packages = true
-no_implicit_optional = true
-show_error_codes = true
-strict_equality = true
-warn_redundant_casts = true
-warn_return_any = true
-warn_unreachable = true
-warn_unused_configs = true
-warn_unused_ignores = true
-disallow_any_unimported = true
-disallow_subclassing_any = true
-exclude = [
-  "^tests/",
-  "^build/tools/make_standalone_toolchain.py$",
-  "^parse_elfnote.py$",
-  "^scripts/update_dac.py$",
-  "^scripts/gen_release_table.py$",
-  "^scripts/create_windows_instance.py$",
-]
-
-[tool.pylint]
-jobs = 0
-disable = [
-  "assignment-from-none",
-  "consider-using-f-string",
-  "consider-using-with",
-  "duplicate-code",
-  "fixme,design",
-  "invalid-name",
-  "line-too-long",           # black handles this
-  "locally-disabled",
-  "missing-docstring",
-  "too-many-lines",
-]
-good-names = ["i", "j", "k", "ex", "Run", "_"]
-dummy-variables-rgx = "_"
-ignore-imports = true
-
-[tool.pytest.ini_options]
-# Required for testing ndkstack.py and ndkgdb.py, since the default import mode
-# prepends the directory of the test (which in those cases would be named
-# ndkstack or ndkgdb...) to sys.path, so `import ndkstack` finds the test
-# package rather than the module under test.
-addopts = "--import-mode=importlib"
-xfail_strict = true
-
-[build-system]
-requires = ["poetry-core>=1.0.0"]
-build-backend = "poetry.core.masonry.api"
diff --git a/qa_config.json b/qa_config.json
index 4c9595c..dde5c1e 100644
--- a/qa_config.json
+++ b/qa_config.json
@@ -1,15 +1,12 @@
 {
   "devices": {
-    "21": [
-      "armeabi-v7a",
-      "arm64-v8a"
+    "16": [
+      "armeabi-v7a"
     ],
     "29": [
-      "x86"
-    ],
-    "34": [
       "armeabi-v7a",
       "arm64-v8a",
+      "x86",
       "x86_64"
     ]
   }
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..c67d2de
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,16 @@
+# pylint and its dependencies.
+astroid==1.5.3
+backports.functools-lru-cache==1.4
+configparser==3.5.0
+enum34==1.1.6
+isort==4.2.15
+lazy-object-proxy==1.3.1
+mccabe==0.6.1
+pylint==1.7.4
+singledispatch==3.4.0.3
+six==1.11.0
+wrapt==1.10.11
+
+# Local projects.
+-e ../development/python-packages/adb
+-e ../external/llvm/utils/lit
diff --git a/run_tests.py b/run_tests.py
index 9db8f37..53b5eba 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2017 The Android Open Source Project
 #
@@ -22,10 +22,5 @@
 import ndk.run_tests
 
 
-def main() -> None:
-    """Trampoline into the test runner defined in the ndk package."""
+if __name__ == '__main__':
     ndk.run_tests.main()
-
-
-if __name__ == "__main__":
-    main()
diff --git a/samples/NdkGdbSample/.gitignore b/samples/NdkGdbSample/.gitignore
index 53f6083..39b6783 100644
--- a/samples/NdkGdbSample/.gitignore
+++ b/samples/NdkGdbSample/.gitignore
@@ -1,2 +1,65 @@
-/.gradle/
-/.idea/
+# Built application files
+*.apk
+*.ap_
+
+# Files for the ART/Dalvik VM
+*.dex
+
+# Java class files
+*.class
+
+# Generated files
+bin/
+gen/
+out/
+
+# Gradle files
+.gradle/
+build/
+
+# Local configuration file (sdk path, etc)
+local.properties
+
+# Proguard folder generated by Eclipse
+proguard/
+
+# Log Files
+*.log
+
+# Android Studio Navigation editor temp files
+.navigation/
+
+# Android Studio captures folder
+captures/
+
+# IntelliJ
+*.iml
+.idea/workspace.xml
+.idea/tasks.xml
+.idea/gradle.xml
+.idea/assetWizardSettings.xml
+.idea/dictionaries
+.idea/libraries
+.idea/caches
+
+# Keystore files
+# Uncomment the following line if you do not want to check your keystore files in.
+#*.jks
+
+# External native build folder generated in Android Studio 2.2 and later
+.externalNativeBuild
+
+# Google Services (e.g. APIs or Firebase)
+google-services.json
+
+# Freeline
+freeline.py
+freeline/
+freeline_project_description.json
+
+# fastlane
+fastlane/report.xml
+fastlane/Preview.html
+fastlane/screenshots
+fastlane/test_output
+fastlane/readme.md
diff --git a/samples/NdkGdbSample/.idea/codeStyles/Project.xml b/samples/NdkGdbSample/.idea/codeStyles/Project.xml
new file mode 100644
index 0000000..30aa626
--- /dev/null
+++ b/samples/NdkGdbSample/.idea/codeStyles/Project.xml
@@ -0,0 +1,29 @@
+<component name="ProjectCodeStyleConfiguration">
+  <code_scheme name="Project" version="173">
+    <Objective-C-extensions>
+      <file>
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Import" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Macro" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Typedef" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Enum" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Constant" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Global" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Struct" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="FunctionPredecl" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Function" />
+      </file>
+      <class>
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Property" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="Synthesize" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InitMethod" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="StaticMethod" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="InstanceMethod" />
+        <option name="com.jetbrains.cidr.lang.util.OCDeclarationKind" value="DeallocMethod" />
+      </class>
+      <extensions>
+        <pair source="cpp" header="h" fileNamingConvention="NONE" />
+        <pair source="c" header="h" fileNamingConvention="NONE" />
+      </extensions>
+    </Objective-C-extensions>
+  </code_scheme>
+</component>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/.idea/misc.xml b/samples/NdkGdbSample/.idea/misc.xml
new file mode 100644
index 0000000..99202cc
--- /dev/null
+++ b/samples/NdkGdbSample/.idea/misc.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="NullableNotNullManager">
+    <option name="myDefaultNullable" value="android.support.annotation.Nullable" />
+    <option name="myDefaultNotNull" value="android.support.annotation.NonNull" />
+    <option name="myNullables">
+      <value>
+        <list size="5">
+          <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.Nullable" />
+          <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nullable" />
+          <item index="2" class="java.lang.String" itemvalue="javax.annotation.CheckForNull" />
+          <item index="3" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.Nullable" />
+          <item index="4" class="java.lang.String" itemvalue="android.support.annotation.Nullable" />
+        </list>
+      </value>
+    </option>
+    <option name="myNotNulls">
+      <value>
+        <list size="4">
+          <item index="0" class="java.lang.String" itemvalue="org.jetbrains.annotations.NotNull" />
+          <item index="1" class="java.lang.String" itemvalue="javax.annotation.Nonnull" />
+          <item index="2" class="java.lang.String" itemvalue="edu.umd.cs.findbugs.annotations.NonNull" />
+          <item index="3" class="java.lang.String" itemvalue="android.support.annotation.NonNull" />
+        </list>
+      </value>
+    </option>
+  </component>
+  <component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" project-jdk-name="1.8" project-jdk-type="JavaSDK">
+    <output url="file://$PROJECT_DIR$/build/classes" />
+  </component>
+  <component name="ProjectType">
+    <option name="id" value="Android" />
+  </component>
+</project>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/.idea/modules.xml b/samples/NdkGdbSample/.idea/modules.xml
new file mode 100644
index 0000000..867be72
--- /dev/null
+++ b/samples/NdkGdbSample/.idea/modules.xml
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="ProjectModuleManager">
+    <modules>
+      <module fileurl="file://$PROJECT_DIR$/NdkGdbSample.iml" filepath="$PROJECT_DIR$/NdkGdbSample.iml" />
+      <module fileurl="file://$PROJECT_DIR$/app/app.iml" filepath="$PROJECT_DIR$/app/app.iml" />
+    </modules>
+  </component>
+</project>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/.idea/runConfigurations.xml b/samples/NdkGdbSample/.idea/runConfigurations.xml
new file mode 100644
index 0000000..7f68460
--- /dev/null
+++ b/samples/NdkGdbSample/.idea/runConfigurations.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="RunConfigurationProducerService">
+    <option name="ignoredProducers">
+      <set>
+        <option value="org.jetbrains.plugins.gradle.execution.test.runner.AllInPackageGradleConfigurationProducer" />
+        <option value="org.jetbrains.plugins.gradle.execution.test.runner.TestClassGradleConfigurationProducer" />
+        <option value="org.jetbrains.plugins.gradle.execution.test.runner.TestMethodGradleConfigurationProducer" />
+      </set>
+    </option>
+  </component>
+</project>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/AndroidManifest.xml b/samples/NdkGdbSample/AndroidManifest.xml
index d44eb50..9b975be 120000
--- a/samples/NdkGdbSample/AndroidManifest.xml
+++ b/samples/NdkGdbSample/AndroidManifest.xml
@@ -1 +1 @@
-app/build/intermediates/packaged_manifests/debug/AndroidManifest.xml
\ No newline at end of file
+app/build/intermediates/manifests/full/debug/AndroidManifest.xml
\ No newline at end of file
diff --git a/samples/NdkGdbSample/README.md b/samples/NdkGdbSample/README.md
index 131799b..75e8067 100644
--- a/samples/NdkGdbSample/README.md
+++ b/samples/NdkGdbSample/README.md
@@ -4,6 +4,6 @@
 following in this directory:
 
 ```bash
-$ ANDROID_SDK_ROOT=<path-to-android-sdk> ./gradlew installDebug
+$ ./gradlew installDebug
 $ $NDK/ndk-gdb --launch
 ```
diff --git a/samples/NdkGdbSample/app/.gitignore b/samples/NdkGdbSample/app/.gitignore
index 84c048a..796b96d 100644
--- a/samples/NdkGdbSample/app/.gitignore
+++ b/samples/NdkGdbSample/app/.gitignore
@@ -1 +1 @@
-/build/
+/build
diff --git a/samples/NdkGdbSample/app/build.gradle b/samples/NdkGdbSample/app/build.gradle
index be3cbad..6c7be60 100644
--- a/samples/NdkGdbSample/app/build.gradle
+++ b/samples/NdkGdbSample/app/build.gradle
@@ -1,20 +1,38 @@
-plugins {
-    id "com.android.application"
-}
+apply plugin: 'com.android.application'
+
+apply plugin: 'kotlin-android'
+
+apply plugin: 'kotlin-android-extensions'
 
 android {
-    compileSdkVersion 31
-    ndkVersion "23.1.7779620"
+    compileSdkVersion 28
     defaultConfig {
         applicationId "com.android.developer.ndkgdbsample"
-        minSdkVersion 19
-        targetSdkVersion 31
+        minSdkVersion 16
+        targetSdkVersion 28
         versionCode 1
         versionName "1.0"
+        testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
+    }
+    buildTypes {
+        release {
+            minifyEnabled false
+            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+        }
     }
     externalNativeBuild {
         ndkBuild {
-            path "src/main/cpp/Android.mk"
+            path 'src/main/cpp/Android.mk'
         }
     }
 }
+
+dependencies {
+    implementation fileTree(dir: 'libs', include: ['*.jar'])
+    implementation"org.jetbrains.kotlin:kotlin-stdlib-jre7:$kotlin_version"
+    implementation 'com.android.support:appcompat-v7:28.0.0'
+    implementation 'com.android.support.constraint:constraint-layout:1.1.3'
+    testImplementation 'junit:junit:4.12'
+    androidTestImplementation 'com.android.support.test:runner:1.0.2'
+    androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
+}
diff --git a/samples/NdkGdbSample/app/proguard-rules.pro b/samples/NdkGdbSample/app/proguard-rules.pro
new file mode 100644
index 0000000..f1b4245
--- /dev/null
+++ b/samples/NdkGdbSample/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/samples/NdkGdbSample/app/src/androidTest/java/com/android/developer/ndkgdbsample/ExampleInstrumentedTest.kt b/samples/NdkGdbSample/app/src/androidTest/java/com/android/developer/ndkgdbsample/ExampleInstrumentedTest.kt
new file mode 100644
index 0000000..7ac76c3
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/androidTest/java/com/android/developer/ndkgdbsample/ExampleInstrumentedTest.kt
@@ -0,0 +1,24 @@
+package com.android.developer.ndkgdbsample
+
+import android.support.test.InstrumentationRegistry
+import android.support.test.runner.AndroidJUnit4
+
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.Assert.*
+
+/**
+ * Instrumented test, which will execute on an Android device.
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+@RunWith(AndroidJUnit4::class)
+class ExampleInstrumentedTest {
+    @Test
+    fun useAppContext() {
+        // Context of the app under test.
+        val appContext = InstrumentationRegistry.getTargetContext()
+        assertEquals("com.android.developer.ndkgdbsample", appContext.packageName)
+    }
+}
diff --git a/samples/NdkGdbSample/app/src/main/AndroidManifest.xml b/samples/NdkGdbSample/app/src/main/AndroidManifest.xml
index 5c8dd8f..9e02ae4 100644
--- a/samples/NdkGdbSample/app/src/main/AndroidManifest.xml
+++ b/samples/NdkGdbSample/app/src/main/AndroidManifest.xml
@@ -1,12 +1,21 @@
 <?xml version="1.0" encoding="utf-8"?>
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     package="com.android.developer.ndkgdbsample">
-    <application android:label="NdkGdbSample">
-        <activity android:name=".MainActivity" android:exported="true">
+
+    <application
+        android:allowBackup="true"
+        android:icon="@mipmap/ic_launcher"
+        android:label="@string/app_name"
+        android:roundIcon="@mipmap/ic_launcher_round"
+        android:supportsRtl="true"
+        android:theme="@style/AppTheme">
+        <activity android:name=".MainActivity">
             <intent-filter>
                 <action android:name="android.intent.action.MAIN" />
+
                 <category android:name="android.intent.category.LAUNCHER" />
             </intent-filter>
         </activity>
     </application>
-</manifest>
+
+</manifest>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/app/src/main/java/com/android/developer/ndkgdbsample/MainActivity.java b/samples/NdkGdbSample/app/src/main/java/com/android/developer/ndkgdbsample/MainActivity.java
deleted file mode 100644
index b444971..0000000
--- a/samples/NdkGdbSample/app/src/main/java/com/android/developer/ndkgdbsample/MainActivity.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package com.android.developer.ndkgdbsample;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.widget.TextView;
-
-public class MainActivity extends Activity {
-
-    @Override
-    protected void onCreate(Bundle savedInstanceState) {
-        super.onCreate(savedInstanceState);
-        TextView tv = new TextView(this);
-        setContentView(tv);
-        tv.setText(getHelloString());
-    }
-
-    private native String getHelloString();
-
-    static {
-        System.loadLibrary("app");
-    }
-}
diff --git a/samples/NdkGdbSample/app/src/main/java/com/android/developer/ndkgdbsample/MainActivity.kt b/samples/NdkGdbSample/app/src/main/java/com/android/developer/ndkgdbsample/MainActivity.kt
new file mode 100644
index 0000000..79f4e51
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/java/com/android/developer/ndkgdbsample/MainActivity.kt
@@ -0,0 +1,24 @@
+package com.android.developer.ndkgdbsample
+
+import android.support.v7.app.AppCompatActivity
+import android.os.Bundle
+import android.view.View
+import android.widget.TextView
+
+class MainActivity : AppCompatActivity() {
+
+    override fun onCreate(savedInstanceState: Bundle?) {
+        super.onCreate(savedInstanceState)
+        setContentView(R.layout.activity_main)
+        val tv = findViewById<View>(R.id.hello_textview) as TextView
+        tv.text = getHelloString()
+    }
+
+    private external fun getHelloString(): String
+
+    companion object {
+        init {
+            System.loadLibrary("app")
+        }
+    }
+}
diff --git a/samples/NdkGdbSample/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/samples/NdkGdbSample/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
new file mode 100644
index 0000000..c7bd21d
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
@@ -0,0 +1,34 @@
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:aapt="http://schemas.android.com/aapt"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportHeight="108"
+    android:viewportWidth="108">
+    <path
+        android:fillType="evenOdd"
+        android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
+        android:strokeColor="#00000000"
+        android:strokeWidth="1">
+        <aapt:attr name="android:fillColor">
+            <gradient
+                android:endX="78.5885"
+                android:endY="90.9159"
+                android:startX="48.7653"
+                android:startY="61.0927"
+                android:type="linear">
+                <item
+                    android:color="#44000000"
+                    android:offset="0.0" />
+                <item
+                    android:color="#00000000"
+                    android:offset="1.0" />
+            </gradient>
+        </aapt:attr>
+    </path>
+    <path
+        android:fillColor="#FFFFFF"
+        android:fillType="nonZero"
+        android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
+        android:strokeColor="#00000000"
+        android:strokeWidth="1" />
+</vector>
diff --git a/samples/NdkGdbSample/app/src/main/res/drawable/ic_launcher_background.xml b/samples/NdkGdbSample/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100644
index 0000000..d5fccc5
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+<?xml version="1.0" encoding="utf-8"?>
+<vector xmlns:android="http://schemas.android.com/apk/res/android"
+    android:width="108dp"
+    android:height="108dp"
+    android:viewportHeight="108"
+    android:viewportWidth="108">
+    <path
+        android:fillColor="#26A69A"
+        android:pathData="M0,0h108v108h-108z" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M9,0L9,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,0L19,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,0L29,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,0L39,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,0L49,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,0L59,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,0L69,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,0L79,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M89,0L89,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M99,0L99,108"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,9L108,9"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,19L108,19"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,29L108,29"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,39L108,39"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,49L108,49"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,59L108,59"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,69L108,69"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,79L108,79"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,89L108,89"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M0,99L108,99"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,29L89,29"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,39L89,39"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,49L89,49"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,59L89,59"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,69L89,69"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M19,79L89,79"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M29,19L29,89"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M39,19L39,89"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M49,19L49,89"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M59,19L59,89"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M69,19L69,89"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+    <path
+        android:fillColor="#00000000"
+        android:pathData="M79,19L79,89"
+        android:strokeColor="#33FFFFFF"
+        android:strokeWidth="0.8" />
+</vector>
diff --git a/samples/NdkGdbSample/app/src/main/res/layout/activity_main.xml b/samples/NdkGdbSample/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..6cbe2fd
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="utf-8"?>
+<android.support.constraint.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    xmlns:app="http://schemas.android.com/apk/res-auto"
+    xmlns:tools="http://schemas.android.com/tools"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"
+    tools:context=".MainActivity">
+
+    <TextView
+        android:id="@+id/hello_textview"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:text="Hello World!"
+        app:layout_constraintBottom_toBottomOf="parent"
+        app:layout_constraintLeft_toLeftOf="parent"
+        app:layout_constraintRight_toRightOf="parent"
+        app:layout_constraintTop_toTopOf="parent" />
+
+</android.support.constraint.ConstraintLayout>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/samples/NdkGdbSample/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/samples/NdkGdbSample/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
new file mode 100644
index 0000000..eca70cf
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
+    <background android:drawable="@drawable/ic_launcher_background" />
+    <foreground android:drawable="@drawable/ic_launcher_foreground" />
+</adaptive-icon>
\ No newline at end of file
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-hdpi/ic_launcher.png b/samples/NdkGdbSample/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..a2f5908
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-hdpi/ic_launcher.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/samples/NdkGdbSample/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
new file mode 100644
index 0000000..1b52399
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-mdpi/ic_launcher.png b/samples/NdkGdbSample/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..ff10afd
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-mdpi/ic_launcher.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/samples/NdkGdbSample/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
new file mode 100644
index 0000000..115a4c7
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/samples/NdkGdbSample/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..dcd3cd8
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-xhdpi/ic_launcher.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/samples/NdkGdbSample/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..459ca60
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/samples/NdkGdbSample/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..8ca12fe
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/samples/NdkGdbSample/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..8e19b41
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/samples/NdkGdbSample/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..b824ebd
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/samples/NdkGdbSample/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..4c19a13
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
Binary files differ
diff --git a/samples/NdkGdbSample/app/src/main/res/values/colors.xml b/samples/NdkGdbSample/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..3ab3e9c
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">#3F51B5</color>
+    <color name="colorPrimaryDark">#303F9F</color>
+    <color name="colorAccent">#FF4081</color>
+</resources>
diff --git a/samples/NdkGdbSample/app/src/main/res/values/strings.xml b/samples/NdkGdbSample/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..778df24
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">NdkGdbSample</string>
+</resources>
diff --git a/samples/NdkGdbSample/app/src/main/res/values/styles.xml b/samples/NdkGdbSample/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..5885930
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/main/res/values/styles.xml
@@ -0,0 +1,11 @@
+<resources>
+
+    <!-- Base application theme. -->
+    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
+        <!-- Customize your theme here. -->
+        <item name="colorPrimary">@color/colorPrimary</item>
+        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
+        <item name="colorAccent">@color/colorAccent</item>
+    </style>
+
+</resources>
diff --git a/samples/NdkGdbSample/app/src/test/java/com/android/developer/ndkgdbsample/ExampleUnitTest.kt b/samples/NdkGdbSample/app/src/test/java/com/android/developer/ndkgdbsample/ExampleUnitTest.kt
new file mode 100644
index 0000000..aecdfa5
--- /dev/null
+++ b/samples/NdkGdbSample/app/src/test/java/com/android/developer/ndkgdbsample/ExampleUnitTest.kt
@@ -0,0 +1,17 @@
+package com.android.developer.ndkgdbsample
+
+import org.junit.Test
+
+import org.junit.Assert.*
+
+/**
+ * Example local unit test, which will execute on the development machine (host).
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+class ExampleUnitTest {
+    @Test
+    fun addition_isCorrect() {
+        assertEquals(4, 2 + 2)
+    }
+}
diff --git a/samples/NdkGdbSample/build.gradle b/samples/NdkGdbSample/build.gradle
index 6203186..6c5f154 100644
--- a/samples/NdkGdbSample/build.gradle
+++ b/samples/NdkGdbSample/build.gradle
@@ -1,12 +1,14 @@
 // Top-level build file where you can add configuration options common to all sub-projects/modules.
 
 buildscript {
+    ext.kotlin_version = '1.2.71'
     repositories {
         google()
-        mavenCentral()
+        jcenter()
     }
     dependencies {
-        classpath "com.android.tools.build:gradle:7.0.3"
+        classpath 'com.android.tools.build:gradle:3.1.0-beta4'
+        classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
 
         // NOTE: Do not place your application dependencies here; they belong
         // in the individual module build.gradle files
@@ -16,7 +18,7 @@
 allprojects {
     repositories {
         google()
-        mavenCentral()
+        jcenter()
     }
 }
 
diff --git a/samples/NdkGdbSample/gradle/wrapper/gradle-wrapper.properties b/samples/NdkGdbSample/gradle/wrapper/gradle-wrapper.properties
index 6c48c16..1ea70ce 100644
--- a/samples/NdkGdbSample/gradle/wrapper/gradle-wrapper.properties
+++ b/samples/NdkGdbSample/gradle/wrapper/gradle-wrapper.properties
@@ -3,4 +3,4 @@
 distributionPath=wrapper/dists
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-7.0.2-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-4.4-all.zip
diff --git a/scripts/create_windows_instance.py b/scripts/create_windows_instance.py
index 3b5cb90..15b842f 100755
--- a/scripts/create_windows_instance.py
+++ b/scripts/create_windows_instance.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2017 The Android Open Source Project
 #
@@ -21,13 +21,13 @@
 import subprocess
 import textwrap
 import time
-
-import winrm  # pylint:disable=import-error
+import winrm
 import yaml
 
+
 THIS_DIR = os.path.dirname(os.path.realpath(__file__))
-GCE_IMAGE = "windows-server-2016-dc-v20171010"
-GCE_IMAGE_PROJECT = "windows-cloud"
+GCE_IMAGE = 'windows-server-2016-dc-v20171010'
+GCE_IMAGE_PROJECT = 'windows-cloud'
 
 
 def logger():
@@ -37,69 +37,48 @@
 
 def check_output(cmd, *args, **kwargs):
     """subprocess.check_output with logging."""
-    logger().debug("check_output: %s", " ".join(cmd))
+    logger().debug('check_output: %s', ' '.join(cmd))
     return subprocess.check_output(cmd, *args, **kwargs)
 
 
 def gcloud_compute(project, cmd):
     """Runs a gcloud compute command for the given project."""
-    return check_output(["gcloud", "compute", "--project", project] + cmd)
+    return check_output(['gcloud', 'compute', '--project', project] + cmd)
 
 
 def create_vm(args):
     """Creates a VM in GCE."""
-    logger().info("Creating VM %s.", args.name)
-    sysprep_file = os.path.join(THIS_DIR, "../infra/windows_sysprep.ps1")
-    gcloud_compute(
-        args.project,
-        [
-            "instances",
-            "create",
-            args.name,
-            "--zone",
-            args.zone,
-            "--machine-type",
-            args.machine_type,
-            "--image-project",
-            GCE_IMAGE_PROJECT,
-            "--image",
-            GCE_IMAGE,
-            "--boot-disk-type",
-            "pd-ssd",
-            "--boot-disk-size",
-            str(args.disk_size),
-            "--tags",
-            "windows",
-            "--metadata-from-file",
-            "sysprep-specialize-script-ps1={}".format(sysprep_file),
-        ],
-    )
+    logger().info('Creating VM %s.', args.name)
+    sysprep_file = os.path.join(THIS_DIR, '../infra/windows_sysprep.ps1')
+    gcloud_compute(args.project, [
+        'instances', 'create', args.name,
+        '--zone', args.zone,
+        '--machine-type', args.machine_type,
+        '--image-project', GCE_IMAGE_PROJECT,
+        '--image', GCE_IMAGE,
+        '--boot-disk-type', 'pd-ssd',
+        '--boot-disk-size', str(args.disk_size),
+        '--tags', 'windows',
+        '--metadata-from-file', 'sysprep-specialize-script-ps1={}'.format(
+            sysprep_file),
+    ])
 
 
 def create_firewall_rule(project, name, allow, source_ranges, target_tags):
     """Creates a firewall rule for the given project."""
-    logger().info("Creating %s firewall rule.", name)
-    gcloud_compute(
-        project,
-        [
-            "firewall-rules",
-            "create",
-            name,
-            "--allow",
-            allow,
-            "--source-ranges",
-            source_ranges,
-            "--target-tags",
-            target_tags,
-        ],
-    )
+    logger().info('Creating %s firewall rule.', name)
+    gcloud_compute(project, [
+        'firewall-rules', 'create', name,
+        '--allow', allow,
+        '--source-ranges', source_ranges,
+        '--target-tags', target_tags,
+    ])
 
 
 def get_serial_port_contents(project, zone, name):
     """Gets the serial port contents for the given machine."""
     return gcloud_compute(
-        project, ["instances", "get-serial-port-output", name, "--zone", zone]
-    )
+        project, ['instances', 'get-serial-port-output', name, '--zone', zone])
 
 
 def wait_for_activation_complete(project, zone, name):
@@ -110,64 +89,63 @@
     """
     while True:
         out = get_serial_port_contents(project, zone, name)
-        if "Activation successful." in out:
-            logger().info("Machine is up.")
+        if 'Activation successful.' in out:
+            logger().info('Machine is up.')
             return
         else:
             retry_time = 10
-            logger().info("Machine still not up. Sleeping for %s seconds.", retry_time)
+            logger().info(
+                'Machine still not up. Sleeping for %s seconds.', retry_time)
             time.sleep(retry_time)
 
 
 def get_instance_info(project, zone, name):
     """Returns the parsed result of gcloud compute instances describe."""
-    data = gcloud_compute(project, ["instances", "describe", name, "--zone", zone])
-    return yaml.safe_load(data)
+    data = gcloud_compute(
+        project, ['instances', 'describe', name, '--zone', zone])
+    return yaml.load(data)
 
 
 def reset_windows_password(project, zone, name):
     """Resets the password and returns a tupe of (username, password)."""
     cmd = [
-        "gcloud",
-        "compute",
-        "--project",
-        project,
-        "reset-windows-password",
-        "--zone",
-        zone,
-        name,
+        'gcloud', 'compute', '--project', project,
+        'reset-windows-password', '--zone', zone, name
     ]
     proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE)
-    proc.stdin.write("Y")
+    proc.stdin.write('Y')
     out, _ = proc.communicate()
     if proc.returncode != 0:
-        raise RuntimeError("reset-windows-password failed")
+        raise RuntimeError('reset-windows-password failed')
 
     username = None
     password = None
     for line in out.splitlines():
-        key, value = line.split(":")
+        key, value = line.split(':')
         key = key.strip()
         value = value.strip()
 
-        if key == "username":
+        if key == 'username':
             username = value
-        elif key == "password":
+        elif key == 'password':
             password = value
 
     if username is None:
-        raise RuntimeError("Could not find username in output:\n{}".format(out))
+        raise RuntimeError(
+            'Could not find username in output:\n{}'.format(out))
     if password is None:
-        raise RuntimeError("Could not find password in output:\n{}".format(out))
+        raise RuntimeError(
+            'Could not find password in output:\n{}'.format(out))
 
     return username, password
 
 
 def test_winrm_connection(host, user, password):
     """Checks that we can execute a basic WinRM command."""
-    logger().info("Testing WinRM connection.")
-    url = "https://{}:5986".format(host)
-    session = winrm.Session(url, auth=(user, password), server_cert_validation="ignore")
+    logger().info('Testing WinRM connection.')
+    url = 'https://{}:5986'.format(host)
+    session = winrm.Session(
+        url, auth=(user, password), server_cert_validation='ignore')
     session.run_ps('echo "Hello, world!"')
 
 
@@ -176,28 +154,24 @@
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
-        "--zone", default="us-west1-b", help="Zone the VM will be created in."
-    )
+        '--zone', default='us-west1-b', help='Zone the VM will be created in.')
 
     parser.add_argument(
-        "--machine-type",
-        default="n1-standard-32",
-        help="GCE machine type. Defaults to 32 cores with 120GB RAM.",
-    )
-
-    parser.add_argument("--disk-size", type=int, default=256, help="VM disk size.")
+        '--machine-type', default='n1-standard-32',
+        help='GCE machine type. Defaults to 32 cores with 120GB RAM.')
 
     parser.add_argument(
-        "-v",
-        "--verbose",
-        action="count",
-        default=0,
-        help="Increase log level. Defaults to logging.WARNING.",
-    )
+        '--disk-size', type=int, default=256, help='VM disk size.')
 
-    parser.add_argument("project", metavar="PROJECT", help="GCE project to use.")
+    parser.add_argument(
+        '-v', '--verbose', action='count', default=0,
+        help='Increase log level. Defaults to logging.WARNING.')
 
-    parser.add_argument("name", metavar="NAME", help="Name to use for the instance.")
+    parser.add_argument(
+        'project', metavar='PROJECT', help='GCE project to use.')
+
+    parser.add_argument(
+        'name', metavar='NAME', help='Name to use for the instance.')
 
     return parser.parse_args()
 
@@ -212,37 +186,32 @@
     logging.basicConfig(level=log_level)
 
     create_vm(args)
-    out = gcloud_compute(args.project, ["firewall-rules", "list"])
-    if "winrm" not in out:
-        create_firewall_rule(args.project, "winrm", "tcp:5986", "0.0.0.0/0", "windows")
+    out = gcloud_compute(args.project, ['firewall-rules', 'list'])
+    if 'winrm' not in out:
+        create_firewall_rule(
+            args.project, 'winrm', 'tcp:5986', '0.0.0.0/0', 'windows')
     wait_for_activation_complete(args.project, args.zone, args.name)
 
     info = get_instance_info(args.project, args.zone, args.name)
-    host = info["networkInterfaces"][0]["accessConfigs"][0]["natIP"]
+    host = info['networkInterfaces'][0]['accessConfigs'][0]['natIP']
     user, password = reset_windows_password(args.project, args.zone, args.name)
     test_winrm_connection(host, user, password)
 
-    secrets_py = os.path.join(THIS_DIR, "..", "secrets.py")
-    logger().info("Writing connection information to %s", secrets_py)
-    with open(secrets_py, "w") as secrets_file:
+    secrets_py = os.path.join(THIS_DIR, '..', 'secrets.py')
+    logger().info('Writing connection information to %s', secrets_py)
+    with open(secrets_py, 'w') as secrets_file:
         # Write the password as a raw string in case it contains backslashes.
-        secrets_file.write(
-            textwrap.dedent(
-                """\
+        secrets_file.write(textwrap.dedent("""\
             GCE_HOST = {}
             GCE_USER = {}
             GCE_PASS = {}
-        """.format(
-                    repr(host), repr(user), repr(password)
-                )
-            )
-        )
-    os.chmod(secrets_py, 0o600)
-    logger().info("Setup completed successfully.")
-    logger().info("Host: %s", host)
-    logger().info("Username: %s", user)
-    logger().info("Password: %s", password)
+        """.format(repr(host), repr(user), repr(password))))
+    os.chmod(secrets_py, 0600)
+    logger().info('Setup completed successfully.')
+    logger().info('Host: %s', host)
+    logger().info('Username: %s', user)
+    logger().info('Password: %s', password)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/scripts/find_non_universal_binaries.sh b/scripts/find_non_universal_binaries.sh
deleted file mode 100755
index e4d5484..0000000
--- a/scripts/find_non_universal_binaries.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-! find "${1:-.}" -type f -print0 | xargs -0 file | grep "Mach-O" | grep -v "Mach-O universal binary"
diff --git a/scripts/gen_release_table.py b/scripts/gen_release_table.py
index 9f483a6..ec6ca57 100755
--- a/scripts/gen_release_table.py
+++ b/scripts/gen_release_table.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2016 The Android Open Source Project
 #
@@ -15,15 +15,15 @@
 # limitations under the License.
 #
 """Generates an HTML table for the downloads page."""
-from __future__ import annotations
+from __future__ import print_function
 
 import argparse
 import logging
+import operator
+import os.path
 import re
 import sys
-from dataclasses import dataclass, field
-from pathlib import Path
-from typing import Optional
+
 
 # pylint: disable=design
 
@@ -32,8 +32,8 @@
     """Returns all stdin input until the first empty line."""
     lines = []
     while True:
-        line = input()
-        if line.strip() == "":
+        line = raw_input()  # pylint: disable=undefined-variable
+        if line.strip() == '':
             return lines
         lines.append(line)
 
@@ -42,151 +42,108 @@
     """Parses and returns command line arguments."""
     parser = argparse.ArgumentParser()
 
-    release_type_group = parser.add_mutually_exclusive_group()
-
-    release_type_group.add_argument(
-        "--beta", action="store_true", help="Generate content for a beta release."
-    )
-
-    release_type_group.add_argument(
-        "--lts", action="store_true", help="Generate content for an LTS release."
-    )
+    parser.add_argument(
+        '--beta', action='store_true',
+        help='Generate content for a beta release.')
 
     return parser.parse_args()
 
 
-@dataclass(frozen=True, order=True)
-class Artifact:
-    sort_index: int = field(init=False, repr=True)
-    host: str
-    package: str
-    size: int
-    sha: str
-
-    def __post_init__(self):
-        sort_order = {"windows": 1, "darwin": 2, "linux": 3}
-        object.__setattr__(self, "sort_index", sort_order.get(self.host, 4))
-
-    @property
-    def pretty_host(self) -> str:
-        return {
-            "darwin": "macOS",
-            "linux": "Linux",
-            "windows": "Windows",
-        }[self.host]
-
-    @classmethod
-    def from_line(cls, line: str) -> Optional[Artifact]:
-        # Some lines are updates to the repository.xml files used by the SDK
-        # manager. We don't care about these.
-        # <sha>        12,345  path/to/repository.xml
-        if line.endswith(".xml") or "android-ndk" not in line:
-            return None
-
-        # Real entries look like this (the leading hex number is optional):
-        # 0x1234 <sha>   123,456,789  path/to/android-ndk-r23-beta5-linux.zip
-        match = re.match(r"^(?:0x[0-9a-f]+)?\s*(\w+)\s+([0-9,]+)\s+(.+)$", line)
-        if match is None:
-            logging.error("Skipping unrecognized line: %s", line)
-            return None
-
-        sha = match.group(1)
-
-        size_str = match.group(2)
-        size = int(size_str.replace(",", ""))
-
-        path = Path(match.group(3))
-        if path.suffix == ".zip" and "darwin" in path.name:
-            # Ignore. We only publish the DMG on the web page.
-            return None
-
-        return Artifact(cls.host_from_package_path(path), path.name, size, sha)
-
-    @staticmethod
-    def host_from_package_path(path: Path) -> str:
-        # android-ndk-$VERSION-$HOST.$EXT
-        # $VERSION might contain a hyphen for beta/RC releases.
-        # Split on all hyphens and join $HOST and $EXT to get the platform.
-        return path.stem.split("-")[-1]
-
-
 def main():
     """Program entry point."""
     args = parse_args()
-    print(
-        'Paste the contents of the "New files" section of the SDK update '
-        "email here. Terminate with an empty line."
-    )
+    print('Paste the contents of the "New files" section of the SDK update '
+          'email here. Terminate with an empty line.')
     lines = get_lines()
     if not lines:
-        sys.exit("No input.")
+        sys.exit('No input.')
 
     # The user may have pasted the following header line:
     # SHA1                                              size  file
-    if lines[0].startswith("SHA1") or lines[0].lstrip().startswith("Link"):
+    if lines[0].startswith('SHA1') or lines[0].lstrip().startswith('Link'):
         lines = lines[1:]
 
     artifacts = []
     for line in lines:
-        if (artifact := Artifact.from_line(line)) is not None:
-            artifacts.append(artifact)
+        # Some lines are updates to the repository.xml files used by the SDK
+        # manager. We don't care about these.
+        # <sha>        12,345  path/to/repository.xml
+        if line.endswith('.xml'):
+            continue
 
-    # Sort the artifacts by the specific order.
-    artifacts = sorted(artifacts)
+        # Real entries look like this (the leading hex number is optional):
+        # 0x1234 <sha>   123,456,789  path/to/android-ndk-r11c-linux-x86_64.zip
+        match = re.match(
+            r'^(?:0x[0-9a-f]+)?\s*(\w+)\s+([0-9,]+)\s+(.+)$', line)
+        if match is None:
+            logging.error('Skipping unrecognized line: %s', line)
+            continue
 
-    print("For GitHub:")
-    print("<table>")
-    print("  <tr>")
-    print("    <th>Platform</th>")
-    print("    <th>Package</th>")
-    print("    <th>Size (Bytes)</th>")
-    print("    <th>SHA1 Checksum</th>")
-    print("  </tr>")
-    for artifact in artifacts:
-        url_base = "https://dl.google.com/android/repository/"
-        package_url = url_base + artifact.package
-        link = '<a href="{}">{}</a>'.format(package_url, artifact.package)
+        sha = match.group(1)
 
-        print("  <tr>")
-        print("    <td>{}</td>".format(artifact.pretty_host))
-        print("    <td>{}</td>".format(link))
-        print("    <td>{}</td>".format(artifact.size))
-        print("    <td>{}</td>".format(artifact.sha))
-        print("  </tr>")
-    print("</table>")
+        size_str = match.group(2)
+        size = int(size_str.replace(',', ''))
+
+        path = match.group(3)
+        package = os.path.basename(path)
+
+        # android-ndk-$VERSION-$HOST-$HOST_ARCH.$EXT
+        # $VERSION might contain a hyphen for beta/RC releases.
+        # Split on all hyphens and join the last two items to get the platform.
+        package_name = os.path.splitext(package)[0]
+        host = '-'.join(package_name.split('-')[-2:])
+        pretty_host = {
+            'darwin-x86_64': 'Mac OS X',
+            'linux-x86_64': 'Linux',
+            'windows-x86_64': 'Windows 64-bit',
+            'windows-x86': 'Windows 32-bit',
+        }[host]
+
+        artifacts.append((pretty_host, package, size, sha))
+
+    # Sort the artifacts by the platform name.
+    artifacts = sorted(artifacts, key=operator.itemgetter(0))
+
+    print('For GitHub:')
+    print('<table>')
+    print('  <tr>')
+    print('    <th>Platform</th>')
+    print('    <th>Package</th>')
+    print('    <th>Size (bytes)</th>')
+    print('    <th>SHA1 Checksum</th>')
+    print('  </tr>')
+    for host, package, size, sha in artifacts:
+        url_base = 'https://dl.google.com/android/repository/'
+        package_url = url_base + package
+        link = '<a href="{}">{}</a>'.format(package_url, package)
+
+        print('  <tr>')
+        print('    <td>{}</td>'.format(host))
+        print('    <td>{}</td>'.format(link))
+        print('    <td>{}</td>'.format(size))
+        print('    <td>{}</td>'.format(sha))
+        print('  </tr>')
+    print('</table>')
     print()
-    print("For DAC:")
+    print('For DAC:')
 
-    if args.beta:
-        var_prefix = "ndk_beta"
-    elif args.lts:
-        var_prefix = "ndk_lts"
-    else:
-        var_prefix = "ndk"
-    for artifact in artifacts:
+    var_prefix = 'ndk_beta' if args.beta else 'ndk'
+    for host, package, size, sha in artifacts:
         dac_host = {
-            "darwin": "mac64_dmg",
-            "linux": "linux64",
-            "windows": "win64",
-        }[artifact.host]
+            'Mac OS X': 'mac64',
+            'Linux': 'linux64',
+            'Windows 64-bit': 'win64',
+            'Windows 32-bit': 'win32',
+        }[host]
         print()
-        print("{{# {} #}}".format(artifact.pretty_host))
-        print(
-            "{{% setvar {}_{}_download %}}{}{{% endsetvar %}}".format(
-                var_prefix, dac_host, artifact.package
-            )
-        )
-        print(
-            "{{% setvar {}_{}_bytes %}}{}{{% endsetvar %}}".format(
-                var_prefix, dac_host, artifact.size
-            )
-        )
-        print(
-            "{{% setvar {}_{}_checksum %}}{}{{% endsetvar %}}".format(
-                var_prefix, dac_host, artifact.sha
-            )
-        )
+        print('{{# {} #}}'.format(host))
+        print('{{% setvar {}_{}_download %}}{}{{% endsetvar %}}'.format(
+            var_prefix, dac_host, package))
+        print('{{% setvar {}_{}_bytes %}}{}{{% endsetvar %}}'.format(
+            var_prefix, dac_host, size))
+        print('{{% setvar {}_{}_checksum %}}{}{{% endsetvar %}}'.format(
+            var_prefix, dac_host, sha))
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/scripts/update_dac.py b/scripts/update_dac.py
index 7f243db..d994f6b 100755
--- a/scripts/update_dac.py
+++ b/scripts/update_dac.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python
 #
 # Copyright (C) 2016 The Android Open Source Project
 #
@@ -28,8 +28,7 @@
 import argparse
 import logging
 import os
-import shutil
-import subprocess
+
 
 THIS_DIR = os.path.realpath(os.path.dirname(__file__))
 NDK_DIR = os.path.dirname(THIS_DIR)
@@ -42,25 +41,28 @@
 
 def copy2(src, dst):
     """shutil.copy2 with logging."""
-    logger().info("copy2: %s %s", src, dst)
+    import shutil
+    logger().info('copy2: %s %s', src, dst)
     shutil.copy2(src, dst)
 
 
 def rmtree(path):
     """shutil.rmtree with logging."""
-    logger().info("rmtree: %s", path)
+    import shutil
+    logger().info('rmtree: %s', path)
     shutil.rmtree(path)
 
 
 def makedirs(path):
     """os.makedirs with logging."""
-    logger().info("makedirs: %s", path)
+    logger().info('makedirs: %s', path)
     os.makedirs(path)
 
 
 def call(cmd, *args, **kwargs):
     """subprocess.call with logging."""
-    logger().info("call: %s", " ".join(cmd))
+    import subprocess
+    logger().info('call: %s', ' '.join(cmd))
     subprocess.call(cmd, *args, **kwargs)
 
 
@@ -70,31 +72,31 @@
     * Rewrite "[TOC]" (gitiles spelling) to "[[TOC]]" (devsite spelling).
     * Add devsite metadata for navigation support.
     """
-    docs_dir = os.path.join(NDK_DIR, "docs/user")
-    out_dir = os.path.join(NDK_DIR, "docs/out")
+    docs_dir = os.path.join(NDK_DIR, 'docs/user')
+    out_dir = os.path.join(NDK_DIR, 'docs/out')
     if os.path.exists(out_dir):
         rmtree(out_dir)
     makedirs(out_dir)
     for doc in os.listdir(docs_dir):
-        with open(os.path.join(out_dir, doc), "w") as out_file:
+        with open(os.path.join(out_dir, doc), 'w') as out_file:
             out_file.write(
-                "Project: /ndk/_project.yaml\n"
-                "Book: /ndk/guides/_book.yaml\n"
-                "Subcategory: guide\n"
-                "\n"
-            )
+                'Project: /ndk/_project.yaml\n'
+                'Book: /ndk/guides/_book.yaml\n'
+                'Subcategory: guide\n'
+                '\n')
 
             path = os.path.join(docs_dir, doc)
             with open(path) as in_file:
                 contents = in_file.read()
-                contents = contents.replace("[TOC]", "[[TOC]]")
+                contents = contents.replace('[TOC]', '[[TOC]]')
                 out_file.write(contents)
     return out_dir
 
 
 def copy_docs(docs_tree, docs_out):
     """Copy the docs to the devsite directory."""
-    dest_dir = os.path.join(docs_tree, "googledata/devsite/site-android/en/ndk/guides")
+    dest_dir = os.path.join(
+        docs_tree, 'googledata/devsite/site-android/en/ndk/guides')
 
     cwd = os.getcwd()
     for root, _, files in os.walk(docs_out):
@@ -107,7 +109,7 @@
                 try:
                     # Might fail if the file is new (will only happen if the
                     # script is re-run), but that's not a problem.
-                    call(["g4", "edit", file_name])
+                    call(['g4', 'edit', file_name])
                 finally:
                     os.chdir(cwd)
             copy2(os.path.join(root, file_name), dest_dir)
@@ -118,8 +120,8 @@
     parser = argparse.ArgumentParser()
 
     parser.add_argument(
-        "docs_tree", type=os.path.realpath, metavar="DOCS_TREE", help="Path to DAC tree"
-    )
+        'docs_tree', type=os.path.realpath, metavar='DOCS_TREE',
+        help='Path to DAC tree')
 
     return parser.parse_args()
 
@@ -132,5 +134,5 @@
     copy_docs(args.docs_tree, docs_out)
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/scripts/update_kokoro_prebuilts.py b/scripts/update_kokoro_prebuilts.py
deleted file mode 100755
index c8cf8ac..0000000
--- a/scripts/update_kokoro_prebuilts.py
+++ /dev/null
@@ -1,436 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""Downloads a set of Kokoro artifacts and prepares commits updating prebuilts.
-
-The script accepts a list of Kokoro build IDs (a list of UUIDs). It downloads
-each build's main archive and extracts it into the appropriate place in an
-ndk-kokoro-main repo/pore checkout. It automatically creates a branch and a
-commit updating each prebuilt.
-
-The script uses the `gsutil` CLI tool from the Google Cloud SDK to download
-artifacts. It first uses a `gsutil ls` command with a '**' wildcard to search
-the GCS bucket for the XML manifests for the given UUIDs. These manifest paths
-contain the Kokoro job name.
-
-For the given set of build IDs, the script verifies that no two builds use
-different Git SHAs for a given repository, which guards against accidentally
-updating two hosts to different versions.
-"""
-
-import argparse
-import glob
-import logging
-import os
-import re
-import shlex
-import shutil
-import subprocess
-import sys
-import textwrap
-from dataclasses import dataclass
-from pathlib import Path
-from typing import Sequence
-from uuid import UUID
-from xml.etree import ElementTree
-
-THIS_DIR = Path(__file__).resolve().parent
-REPO_ROOT = THIS_DIR.parent.parent
-
-GCS_BUCKET = "ndk-kokoro-release-artifacts"
-
-
-@dataclass(frozen=True)
-class KokoroPrebuilt:
-    title: str
-    extract_path: str
-    artifact_glob: str
-
-
-# A map from a Kokoro job name to the paths needed for downloading and
-# extracting an archive.
-KOKORO_PREBUILTS: dict[str, KokoroPrebuilt] = {
-    "ndk/cmake/linux_release": KokoroPrebuilt(
-        title="Linux CMake",
-        extract_path="prebuilts/cmake/linux-x86",
-        artifact_glob="cmake-linux-*-{build_id}.zip",
-    ),
-    "ndk/cmake/darwin_release": KokoroPrebuilt(
-        title="Darwin CMake",
-        extract_path="prebuilts/cmake/darwin-x86",
-        artifact_glob="cmake-darwin-*-{build_id}.zip",
-    ),
-    "ndk/cmake/windows_release": KokoroPrebuilt(
-        title="Windows CMake",
-        extract_path="prebuilts/cmake/windows-x86",
-        artifact_glob="cmake-windows-*-{build_id}.zip",
-    ),
-    "ndk/ninja/linux_release": KokoroPrebuilt(
-        title="Linux Ninja",
-        extract_path="prebuilts/ninja/linux-x86",
-        artifact_glob="ninja-linux-{build_id}.zip",
-    ),
-    "ndk/ninja/darwin_release": KokoroPrebuilt(
-        title="Darwin Ninja",
-        extract_path="prebuilts/ninja/darwin-x86",
-        artifact_glob="ninja-darwin-{build_id}.zip",
-    ),
-    "ndk/ninja/windows_release": KokoroPrebuilt(
-        title="Windows Ninja",
-        extract_path="prebuilts/ninja/windows-x86",
-        artifact_glob="ninja-windows-{build_id}.zip",
-    ),
-    "ndk/python3/linux_release": KokoroPrebuilt(
-        title="Linux Python3",
-        extract_path="prebuilts/python/linux-x86",
-        artifact_glob="python3-linux-{build_id}.tar.bz2",
-    ),
-    "ndk/python3/darwin_release": KokoroPrebuilt(
-        title="Darwin Python3",
-        extract_path="prebuilts/python/darwin-x86",
-        artifact_glob="python3-darwin-{build_id}.tar.bz2",
-    ),
-    "ndk/python3/windows_release": KokoroPrebuilt(
-        title="Windows Python3",
-        extract_path="prebuilts/python/windows-x86",
-        artifact_glob="python3-windows-{build_id}.zip",
-    ),
-}
-
-
-def logger() -> logging.Logger:
-    """Returns the module logger."""
-    return logging.getLogger(__name__)
-
-
-def check_call(cmd: Sequence[str]) -> None:
-    """subprocess.check_call with logging."""
-    logger().info("check_call `%s`", shlex.join(cmd))
-    subprocess.check_call(cmd)
-
-
-def rmtree(path: Path) -> None:
-    """shutil.rmtree with logging."""
-    logger().info("rmtree %s", path)
-    shutil.rmtree(path)
-
-
-def makedirs(path: Path) -> None:
-    """os.makedirs with logging."""
-    logger().info("mkdir -p %s", path)
-    path.mkdir(parents=True, exist_ok=True)
-
-
-def in_pore_tree() -> bool:
-    """Returns True if the tree is using pore instead of repo."""
-    return (REPO_ROOT / ".pore").exists()
-
-
-def parse_args() -> argparse.Namespace:
-    """Parses and returns command line arguments."""
-    parser = argparse.ArgumentParser(
-        description="Downloads artifacts from Kokoro and prepares commits to "
-        "update prebuilts."
-    )
-
-    parser.add_argument(
-        "build_id",
-        metavar="BUILD_ID",
-        type=UUID,
-        nargs="+",
-        help=("Kokoro build ID (a UUID)"),
-    )
-
-    parser.add_argument(
-        "-m", "--message", default="", help="Extra text to include in commit messsages."
-    )
-
-    parser.add_argument(
-        "-b", "--bug", default="None", help="Bug URL for commit messages."
-    )
-
-    branch_group = parser.add_mutually_exclusive_group()
-
-    branch_group.add_argument(
-        "--use-current-branch",
-        action="store_true",
-        help="Do not repo/pore start new branches for the update.",
-    )
-
-    branch_group.add_argument(
-        "--branch",
-        default="update-kokoro-prebuilts",
-        help="Name of branch to pass to repo/pore start.",
-    )
-
-    parser.add_argument(
-        "-v", "--verbose", action="store_true", help="Dump extra debugging information."
-    )
-
-    return parser.parse_args()
-
-
-@dataclass(frozen=True)
-class BuildStatus:
-    job_name: str
-    build_id: UUID
-    gcs_path: str
-    # name -> sha. (e.g. 'external/cmake' -> '86d651ddf5a1ca0ec3e4823bda800b0cea32d253')
-    repos: dict[str, str]
-
-
-def parse_manifest_repos(manifest_path: Path) -> dict[str, str]:
-    root = ElementTree.parse(manifest_path).getroot()
-    logger().debug("parsing XML manifest %s", str(manifest_path))
-    result = {}
-    for project in root.findall("project"):
-        project_str = (
-            ElementTree.tostring(project, encoding="unicode").strip()
-            + f" from {manifest_path}"
-        )
-        path = project.get("path")
-        if path is None:
-            sys.exit(f"error: path attribute missing from: {project_str}")
-        revision = project.get("revision")
-        if revision is None:
-            sys.exit(f"error: revision attribute missing from: {project_str}")
-        result[path] = revision
-    return result
-
-
-def get_build_status(
-    build_id_list: list[UUID], gsutil_cmd: str, tmp_dir: Path
-) -> list[BuildStatus]:
-    """Use gsutil to query build statuses of a set of build IDs."""
-
-    # Search the GCS bucket for XML manifests matching the build IDs. Allow the
-    # command to fail, because we'll do a better job of reporting missing UUIDs
-    # afterwards.
-    gsutil_ls_cmd = [gsutil_cmd, "ls"] + [
-        f"gs://{GCS_BUCKET}/**/manifest-{x}.xml" for x in build_id_list
-    ]
-    logger().info("run `%s`", shlex.join(gsutil_ls_cmd))
-    ls_output = subprocess.run(
-        gsutil_ls_cmd, encoding="utf8", stdout=subprocess.PIPE, check=False
-    )
-
-    @dataclass(frozen=True)
-    class LsLine:
-        job_name: str
-        gcs_path: str
-
-    ls_info: dict[UUID, LsLine] = {}
-
-    for ls_line in ls_output.stdout.splitlines():
-        logger().debug("gsutil ls output: %s", ls_line)
-        match = re.match(
-            rf"(gs://{GCS_BUCKET}/prod/"
-            r"(.*)/"  # Kokoro job name (e.g. ndk/cmake/linux_release)
-            r"\d+/"  # build number (e.g. 17)
-            r"\d+-\d+)"  # timestamp (e.g. 20211109-203945)
-            r"/manifest-([0-9a-f-]+)\.xml$",
-            ls_line,
-        )
-        if not match:
-            sys.exit(f"error: could not parse `gsutil ls` line: {ls_line}")
-        gcs_path, job_name, bid_str = match.groups()
-        ls_info[UUID(bid_str)] = LsLine(job_name, gcs_path)
-
-    missing = set(build_id_list) - ls_info.keys()
-    if len(missing) > 0:
-        sys.exit("error: build IDs not found: " + ", ".join(map(str, sorted(missing))))
-
-    xml_paths = [f"{ls_info[bid].gcs_path}/manifest-{bid}.xml" for bid in build_id_list]
-    check_call(["gsutil", "cp"] + xml_paths + [str(tmp_dir)])
-
-    result = []
-    for bid in build_id_list:
-        repos = parse_manifest_repos(tmp_dir / f"manifest-{bid}.xml")
-        result.append(
-            BuildStatus(ls_info[bid].job_name, bid, ls_info[bid].gcs_path, repos)
-        )
-    return result
-
-
-def validate_build_repos(builds: list[BuildStatus]) -> None:
-    """Make sure that no two builds have different git SHAs for the same
-    repository name."""
-    repos = {}
-    success = True
-    for build in builds:
-        for name, sha in build.repos.items():
-            if name not in repos:
-                repos[name] = (sha, build.build_id)
-            else:
-                if repos[name][0] != sha:
-                    print(f"error: conflicting versions of {name} repo:")
-                    print(f" - {repos[name][0]} in {repos[name][1]}")
-                    print(f" - {sha} in {build.build_id}")
-                    success = False
-    if not success:
-        sys.exit(1)
-
-    # Print out a table of git SHAs and repository names.
-    print()
-    print("No conflicting repositories detected:")
-    for name, (sha, _) in sorted(repos.items()):
-        print(f"{sha} {name}")
-    print()
-
-
-def validate_job_names(builds: list[BuildStatus]) -> None:
-    print("Kokoro builds to download:")
-    for build in builds:
-        print(f"{build.build_id} {build.job_name}")
-    print()
-    names = [build.job_name for build in builds]
-    for name in names:
-        if names.count(name) != 1:
-            sys.exit(f"error: job {name} specified more than once")
-        if name not in KOKORO_PREBUILTS:
-            sys.exit(f"error: job {name} is not handled by this script")
-
-
-def clean_dest_dir(parent: Path) -> None:
-    """Remove the contents of the directory (whether tracked or untracked by
-    git), but don't remove .git or .gitignore."""
-    logger().info("clean_dest_dir %s", parent)
-    for name in os.listdir(parent):
-        if name == ".git":
-            continue
-        if name == ".gitignore":
-            # The prebuilts/python/* directories have a .gitignore file that
-            # isn't part of the Kokoro archive, but we want to preserve it when
-            # updating prebuilts.
-            continue
-        path = parent / name
-        if path.is_symlink() or path.is_file():
-            os.unlink(path)
-        else:
-            shutil.rmtree(path)
-
-
-def download_artifacts(
-    builds: list[BuildStatus], gsutil_cmd: str, tmp_dir: Path
-) -> list[Path]:
-    """Download each build's artifact.
-
-    Return a list of absolute paths."""
-    patterns = []
-    for build in builds:
-        prebuilt = KOKORO_PREBUILTS[build.job_name]
-        patterns.append(
-            build.gcs_path
-            + "/"
-            + prebuilt.artifact_glob.format(build_id=build.build_id)
-        )
-
-    check_call([gsutil_cmd, "-m", "cp"] + patterns + [str(tmp_dir)])
-    artifacts = []
-    for pattern in patterns:
-        (artifact,) = glob.glob(str(tmp_dir / os.path.basename(pattern)))
-        artifacts.append(Path(artifact))
-
-    return artifacts
-
-
-def update_artifact(
-    build: BuildStatus,
-    archive_path: Path,
-    extra_message: str,
-    bug: str,
-    use_current_branch: bool,
-    branch_name: str,
-) -> None:
-    prebuilt = KOKORO_PREBUILTS[build.job_name]
-    dest_path = REPO_ROOT / prebuilt.extract_path
-
-    os.chdir(dest_path)
-
-    if not use_current_branch:
-        repo_cmd = "pore" if in_pore_tree() else "repo"
-        check_call([repo_cmd, "start", branch_name])
-
-    clean_dest_dir(dest_path)
-    if archive_path.name.endswith(".tar.bz2"):
-        check_call(["tar", "-xf", str(archive_path)])
-    elif archive_path.name.endswith(".zip"):
-        check_call(["unzip", "-q", str(archive_path)])
-    else:
-        sys.exit(f"error: unrecognized type of archive: {archive_path}")
-    # Pass -f so that files from the archive are added even if they are listed
-    # in .gitignore.
-    check_call(["git", "add", "-f", "."])
-
-    commit_msg = textwrap.dedent(
-        f"""\
-        Update {prebuilt.title} prebuilt
-
-        Fusion2: http://fusion2/{build.build_id}
-        GCS path: {build.gcs_path}
-        Prebuilt updated using: {Path(__file__).resolve().relative_to(REPO_ROOT)}
-
-        {extra_message}
-
-        Test: Treehugger, Kokoro presubmit
-        Bug: {bug}
-        """
-    )
-
-    check_call(["git", "commit", "-m", commit_msg])
-
-
-def main() -> None:
-    args = parse_args()
-    if args.verbose:
-        logging.basicConfig(level=logging.DEBUG)
-    else:
-        logging.basicConfig(level=logging.INFO)
-
-    gsutil_cmd = shutil.which("gsutil")
-    if not gsutil_cmd:
-        sys.exit(
-            'error: no "gsutil" in PATH. ' 'Try "apt-get install google-cloud-sdk".'
-        )
-
-    tmp_dir = REPO_ROOT / "gcs_artifacts"
-    if tmp_dir.exists():
-        rmtree(tmp_dir)
-    makedirs(tmp_dir)
-
-    for build_id in args.build_id:
-        if args.build_id.count(build_id) != 1:
-            sys.exit(f"error: build ID {build_id} is duplicated")
-
-    builds = get_build_status(args.build_id, gsutil_cmd, tmp_dir)
-    validate_build_repos(builds)
-    validate_job_names(builds)
-    artifacts = download_artifacts(builds, gsutil_cmd, tmp_dir)
-
-    for build, artifact in zip(builds, artifacts):
-        update_artifact(
-            build,
-            artifact,
-            args.message,
-            args.bug,
-            args.use_current_branch,
-            args.branch,
-        )
-
-
-if __name__ == "__main__":
-    main()
diff --git a/setup.py b/setup.py
index e5b2d77..01aef9a 100644
--- a/setup.py
+++ b/setup.py
@@ -17,25 +17,28 @@
 from __future__ import absolute_import
 
 import os
+import setuptools
 
-import setuptools  # type: ignore
 
 THIS_DIR = os.path.dirname(os.path.realpath(__file__))
 
 
-with open(os.path.join(THIS_DIR, "README.md")) as readme_file:
+with open(os.path.join(THIS_DIR, 'README.md')) as readme_file:
     LONG_DESCRIPTION = readme_file.read()
 
 
 setuptools.setup(
-    name="ndk",
-    version="1.0.0",
-    description="Build and test tools for working on the NDK.",
+    name='ndk',
+    version='1.0.0',
+
+    description='Build and test tools for working on the NDK.',
     long_description=LONG_DESCRIPTION,
+
     packages=setuptools.find_packages(),
+
     entry_points={
-        "console_scripts": [
-            "run_tests.py = ndk.run_tests:main",
+        'console_scripts': [
+            'run_tests.py = ndk.run_tests:main',
         ],
     },
 )
diff --git a/sources/android/cpufeatures/Android.mk b/sources/android/cpufeatures/Android.mk
index be9b73d..e42fdf8 100644
--- a/sources/android/cpufeatures/Android.mk
+++ b/sources/android/cpufeatures/Android.mk
@@ -1,6 +1,3 @@
-# This library is provided only for legacy support. For a maintained library,
-# migrate to https://github.com/google/cpu_features.
-
 LOCAL_PATH := $(call my-dir)
 
 include $(CLEAR_VARS)
diff --git a/sources/android/cpufeatures/README.md b/sources/android/cpufeatures/README.md
deleted file mode 100644
index b31cc4c..0000000
--- a/sources/android/cpufeatures/README.md
+++ /dev/null
@@ -1,4 +0,0 @@
-# cpufeatures
-
-This library is provided only for legacy support. For a maintained library,
-migrate to https://github.com/google/cpu_features.
\ No newline at end of file
diff --git a/sources/android/cpufeatures/cpu-features.c b/sources/android/cpufeatures/cpu-features.c
index 7fce22c..e2bd749 100644
--- a/sources/android/cpufeatures/cpu-features.c
+++ b/sources/android/cpufeatures/cpu-features.c
@@ -26,9 +26,39 @@
  * SUCH DAMAGE.
  */
 
-/*
- * This library is provided only for legacy support. For a maintained library,
- * migrate to https://github.com/google/cpu_features.
+/* ChangeLog for this library:
+ *
+ * NDK r10e?: Add MIPS MSA feature.
+ *
+ * NDK r10: Support for 64-bit CPUs (Intel, ARM & MIPS).
+ *
+ * NDK r8d: Add android_setCpu().
+ *
+ * NDK r8c: Add new ARM CPU features: VFPv2, VFP_D32, VFP_FP16,
+ *          VFP_FMA, NEON_FMA, IDIV_ARM, IDIV_THUMB2 and iWMMXt.
+ *
+ *          Rewrite the code to parse /proc/self/auxv instead of
+ *          the "Features" field in /proc/cpuinfo.
+ *
+ *          Dynamically allocate the buffer that hold the content
+ *          of /proc/cpuinfo to deal with newer hardware.
+ *
+ * NDK r7c: Fix CPU count computation. The old method only reported the
+ *           number of _active_ CPUs when the library was initialized,
+ *           which could be less than the real total.
+ *
+ * NDK r5: Handle buggy kernels which report a CPU Architecture number of 7
+ *         for an ARMv6 CPU (see below).
+ *
+ *         Handle kernels that only report 'neon', and not 'vfpv3'
+ *         (VFPv3 is mandated by the ARM architecture is Neon is implemented)
+ *
+ *         Handle kernels that only report 'vfpv3d16', and not 'vfpv3'
+ *
+ *         Fix x86 compilation. Report ANDROID_CPU_FAMILY_X86 in
+ *         android_getCpuFamily().
+ *
+ * NDK r4: Initial release
  */
 
 #include "cpu-features.h"
@@ -40,7 +70,6 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#include <sys/auxv.h>
 #include <sys/system_properties.h>
 #include <unistd.h>
 
@@ -462,6 +491,56 @@
     HWCAP_IDIVT )
 #endif
 
+#if defined(__mips__)
+// see <uapi/asm/hwcap.h> kernel header
+#define HWCAP_MIPS_R6           (1 << 0)
+#define HWCAP_MIPS_MSA          (1 << 1)
+#endif
+
+#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
+
+#define AT_HWCAP 16
+#define AT_HWCAP2 26
+
+// Probe the system's C library for a 'getauxval' function and call it if
+// it exits, or return 0 for failure. This function is available since API
+// level 20.
+//
+// This code does *NOT* check for '__ANDROID_API__ >= 20' to support the
+// edge case where some NDK developers use headers for a platform that is
+// newer than the one really targetted by their application.
+// This is typically done to use newer native APIs only when running on more
+// recent Android versions, and requires careful symbol management.
+//
+// Note that getauxval() can't really be re-implemented here, because
+// its implementation does not parse /proc/self/auxv. Instead it depends
+// on values  that are passed by the kernel at process-init time to the
+// C runtime initialization layer.
+static uint32_t
+get_elf_hwcap_from_getauxval(int hwcap_type) {
+    typedef unsigned long getauxval_func_t(unsigned long);
+
+    dlerror();
+    void* libc_handle = dlopen("libc.so", RTLD_NOW);
+    if (!libc_handle) {
+        D("Could not dlopen() C library: %s\n", dlerror());
+        return 0;
+    }
+
+    uint32_t ret = 0;
+    getauxval_func_t* func = (getauxval_func_t*)
+            dlsym(libc_handle, "getauxval");
+    if (!func) {
+        D("Could not find getauxval() in C library\n");
+    } else {
+        // Note: getauxval() returns 0 on failure. Doesn't touch errno.
+        ret = (uint32_t)(*func)(hwcap_type);
+    }
+    dlclose(libc_handle);
+    return ret;
+}
+#endif
+
 #if defined(__arm__)
 // Parse /proc/self/auxv to extract the ELF HW capabilities bitmap for the
 // current CPU. Note that this file is not accessible from regular
@@ -580,12 +659,15 @@
     g_cpuFamily = ANDROID_CPU_FAMILY_ARM;
 #elif defined(__i386__)
     g_cpuFamily = ANDROID_CPU_FAMILY_X86;
+#elif defined(__mips64)
+/* Needs to be before __mips__ since the compiler defines both */
+    g_cpuFamily = ANDROID_CPU_FAMILY_MIPS64;
+#elif defined(__mips__)
+    g_cpuFamily = ANDROID_CPU_FAMILY_MIPS;
 #elif defined(__aarch64__)
     g_cpuFamily = ANDROID_CPU_FAMILY_ARM64;
 #elif defined(__x86_64__)
     g_cpuFamily = ANDROID_CPU_FAMILY_X86_64;
-#elif defined(__riscv) && __riscv_xlen == 64
-    g_cpuFamily = ANDROID_CPU_FAMILY_RISCV64;
 #else
     g_cpuFamily = ANDROID_CPU_FAMILY_UNKNOWN;
 #endif
@@ -693,7 +775,8 @@
         }
 
         /* Extract the list of CPU features from ELF hwcaps */
-        uint32_t hwcaps = getauxval(AT_HWCAP);
+        uint32_t hwcaps = 0;
+        hwcaps = get_elf_hwcap_from_getauxval(AT_HWCAP);
         if (!hwcaps) {
             D("Parsing /proc/self/auxv to extract ELF hwcaps!\n");
             hwcaps = get_elf_hwcap_from_proc_self_auxv();
@@ -766,7 +849,8 @@
         }
 
         /* Extract the list of CPU features from ELF hwcaps2 */
-        uint32_t hwcaps2 = getauxval(AT_HWCAP2);
+        uint32_t hwcaps2 = 0;
+        hwcaps2 = get_elf_hwcap_from_getauxval(AT_HWCAP2);
         if (hwcaps2 != 0) {
             int has_aes     = (hwcaps2 & HWCAP2_AES);
             int has_pmull   = (hwcaps2 & HWCAP2_PMULL);
@@ -839,14 +923,12 @@
             uint32_t  cpuid;
             uint64_t  or_flags;
         } cpu_fixes[] = {
-            /* The Nexus 4 and 7 (Qualcomm Krait) kernel configurations
-             * forget to report IDIV support. */
+            /* The Nexus 4 (Qualcomm Krait) kernel configuration
+             * forgets to report IDIV support. */
             { 0x510006f2, ANDROID_CPU_ARM_FEATURE_IDIV_ARM |
                           ANDROID_CPU_ARM_FEATURE_IDIV_THUMB2 },
             { 0x510006f3, ANDROID_CPU_ARM_FEATURE_IDIV_ARM |
                           ANDROID_CPU_ARM_FEATURE_IDIV_THUMB2 },
-            { 0x511006f0, ANDROID_CPU_ARM_FEATURE_IDIV_ARM |
-                          ANDROID_CPU_ARM_FEATURE_IDIV_THUMB2 },
         };
         size_t n;
         for (n = 0; n < sizeof(cpu_fixes)/sizeof(cpu_fixes[0]); ++n) {
@@ -877,7 +959,8 @@
 #ifdef __aarch64__
     {
         /* Extract the list of CPU features from ELF hwcaps */
-        uint32_t hwcaps = getauxval(AT_HWCAP);
+        uint32_t hwcaps = 0;
+        hwcaps = get_elf_hwcap_from_getauxval(AT_HWCAP);
         if (hwcaps != 0) {
             int has_fp      = (hwcaps & HWCAP_FP);
             int has_asimd   = (hwcaps & HWCAP_ASIMD);
@@ -961,6 +1044,21 @@
 
 
 #endif
+#if defined( __mips__)
+    {   /* MIPS and MIPS64 */
+        /* Extract the list of CPU features from ELF hwcaps */
+        uint32_t hwcaps = 0;
+        hwcaps = get_elf_hwcap_from_getauxval(AT_HWCAP);
+        if (hwcaps != 0) {
+            int has_r6      = (hwcaps & HWCAP_MIPS_R6);
+            int has_msa     = (hwcaps & HWCAP_MIPS_MSA);
+            if (has_r6)
+                g_cpuFeatures |= ANDROID_CPU_MIPS_FEATURE_R6;
+            if (has_msa)
+                g_cpuFeatures |= ANDROID_CPU_MIPS_FEATURE_MSA;
+        }
+    }
+#endif /* __mips__ */
 
     free(cpuinfo);
 }
@@ -990,7 +1088,7 @@
 }
 
 static void
-android_cpuInitTrivial(void)
+android_cpuInitDummy(void)
 {
     g_inited = 1;
 }
@@ -1005,7 +1103,7 @@
     android_cpuInitFamily();
     g_cpuCount = (cpu_count <= 0 ? 1 : cpu_count);
     g_cpuFeatures = cpu_features;
-    pthread_once(&g_once, android_cpuInitTrivial);
+    pthread_once(&g_once, android_cpuInitDummy);
 
     return 1;
 }
diff --git a/sources/android/cpufeatures/cpu-features.h b/sources/android/cpufeatures/cpu-features.h
index 695e913..1e97241 100644
--- a/sources/android/cpufeatures/cpu-features.h
+++ b/sources/android/cpufeatures/cpu-features.h
@@ -25,13 +25,8 @@
  * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  */
-
-#pragma once
-
-/*
- * This library is provided only for legacy support. For a maintained library,
- * migrate to https://github.com/google/cpu_features.
- */
+#ifndef CPU_FEATURES_H
+#define CPU_FEATURES_H
 
 #include <sys/cdefs.h>
 #include <stdint.h>
@@ -49,7 +44,6 @@
     ANDROID_CPU_FAMILY_ARM64,
     ANDROID_CPU_FAMILY_X86_64,
     ANDROID_CPU_FAMILY_MIPS64,
-    ANDROID_CPU_FAMILY_RISCV64,
 
     ANDROID_CPU_FAMILY_MAX  /* do not remove */
 
@@ -325,3 +319,5 @@
 #endif
 
 __END_DECLS
+
+#endif /* CPU_FEATURES_H */
diff --git a/sources/android/libthread_db/README b/sources/android/libthread_db/README
new file mode 100644
index 0000000..aadd763
--- /dev/null
+++ b/sources/android/libthread_db/README
@@ -0,0 +1,10 @@
+Here are the sources of the special libthread_db that will be statically
+linked against our gdbserver binary. These are uses automatically by the
+build-gdbserver.sh script.
+
+THIS IS NOT AN IMPORT MODULE.
+
+Applications don't need to link to libthread_db anyway, this library is
+a small interface used by gdbserver to manage the list of threads on the
+target process. Its API and ABI are not stable and may change in the
+future.
diff --git a/sources/android/libthread_db/libthread_db.c b/sources/android/libthread_db/libthread_db.c
new file mode 100644
index 0000000..407885f
--- /dev/null
+++ b/sources/android/libthread_db/libthread_db.c
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dirent.h>
+#include <stdint.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/ptrace.h>
+#include <thread_db.h>
+#include <unistd.h>
+
+extern pid_t ps_getpid(struct ps_prochandle*);
+
+// We don't have any symbols to cache.
+char const** td_symbol_list(void) {
+    static char const* symbols[] = { NULL };
+    return symbols;
+}
+
+//
+// Thread agents.
+//
+
+td_err_e td_ta_new(struct ps_prochandle* proc_handle, td_thragent_t** agent_out) {
+    td_thragent_t* agent = (td_thragent_t*) calloc(1, sizeof(td_thragent_t));
+    if (!agent) {
+        return TD_MALLOC;
+    }
+
+    agent->pid = ps_getpid(proc_handle);
+    agent->ph = proc_handle;
+    *agent_out = agent;
+
+    return TD_OK;
+}
+
+
+td_err_e td_ta_delete(td_thragent_t* ta) {
+    free(ta);
+    return TD_OK;
+}
+
+td_err_e td_ta_map_lwp2thr(td_thragent_t const* agent, lwpid_t lwpid, td_thrhandle_t* th) {
+    th->pid = ps_getpid(agent->ph);
+    th->tid = lwpid;
+    return TD_OK;
+}
+
+td_err_e td_ta_thr_iter(td_thragent_t const* agent,
+                        td_thr_iter_f* func,
+                        void* cookie,
+                        td_thr_state_e state,
+                        int32_t prio,
+                        sigset_t* sigmask,
+                        uint32_t user_flags) {
+    td_err_e err = TD_OK;
+    char path[32];
+    DIR * dir;
+    struct dirent * entry;
+    td_thrhandle_t handle;
+
+    snprintf(path, sizeof(path), "/proc/%d/task/", agent->pid);
+    dir = opendir(path);
+    if (!dir) {
+        return TD_NOEVENT;
+    }
+
+    handle.pid = agent->pid;
+    while ((entry = readdir(dir)) != NULL) {
+        if (strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
+            continue;
+        }
+        handle.tid = atoi(entry->d_name);
+        if (func(&handle, cookie) != 0) {
+	    err = TD_DBERR;
+            break;
+        }
+    }
+
+    closedir(dir);
+
+    return err;
+}
+
+//
+// Threads.
+//
+
+td_err_e td_thr_get_info(td_thrhandle_t const* handle, td_thrinfo_t* info) {
+    info->ti_tid = handle->tid;
+    info->ti_lid = handle->tid; // Our pthreads uses kernel ids for tids
+    info->ti_state = TD_THR_SLEEP; /* XXX this needs to be read from /proc/<pid>/task/<tid>.
+                                      This is only used to see if the thread is a zombie or not */
+    return TD_OK;
+}
+
+//
+// TLS.
+//
+
+td_err_e td_thr_tlsbase(const td_thrhandle_t* unused1, unsigned long int unused2, psaddr_t* unused3) {
+  return TD_NOAPLIC; // TODO: fix this if/when we support ELF TLS.
+}
+
+td_err_e td_thr_tls_get_addr(const td_thrhandle_t* unused1, psaddr_t unused2, size_t unused3, psaddr_t* unused4) {
+  return TD_NOAPLIC; // TODO: fix this if/when we support ELF TLS.
+}
+
+//
+// Thread events.
+//
+
+// Thread events are no longer used by gdb >= 7.0.
+// Because we link gdbserver statically, though, we need dummy definitions.
+td_err_e td_ta_set_event(td_thragent_t const* agent, td_thr_events_t* events) {
+    abort();
+}
+td_err_e td_ta_event_getmsg(td_thragent_t const* agent, td_event_msg_t* event) {
+    abort();
+}
+td_err_e td_thr_event_enable(const td_thrhandle_t* handle, int event) {
+    abort();
+}
+td_err_e td_ta_clear_event(const td_thragent_t* ta_arg, td_thr_events_t* event) {
+    abort();
+}
+td_err_e td_ta_event_addr(td_thragent_t const* agent, td_event_e event, td_notify_t* notify_out) {
+    abort();
+}
diff --git a/sources/android/libthread_db/thread_db.h b/sources/android/libthread_db/thread_db.h
new file mode 100644
index 0000000..9ed4199
--- /dev/null
+++ b/sources/android/libthread_db/thread_db.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _LIBTHREAD_DB__THREAD_DB_H
+#define _LIBTHREAD_DB__THREAD_DB_H
+
+#include <pthread.h>
+#include <signal.h>
+#include <stdint.h>
+#include <sys/procfs.h>
+#include <sys/types.h>
+
+#define TD_THR_ANY_USER_FLAGS       0xffffffff
+#define TD_THR_LOWEST_PRIORITY      -20
+#define TD_SIGNO_MASK               NULL
+
+/* td_err_e values */
+enum {
+    TD_OK,
+    TD_ERR,
+    TD_NOTHR,
+    TD_NOSV,
+    TD_NOLWP,
+    TD_BADPH,
+    TD_BADTH,
+    TD_BADSH,
+    TD_BADTA,
+    TD_BADKEY,
+    TD_NOMSG,
+    TD_NOFPREGS,
+    TD_NOLIBTHREAD,
+    TD_NOEVENT,
+    TD_NOCAPAB,
+    TD_DBERR,
+    TD_NOAPLIC,
+    TD_NOTSD,
+    TD_MALLOC,
+    TD_PARTIALREG,
+    TD_NOXREGS,
+    TD_VERSION
+};
+
+/*
+ * td_event_e values
+ * NOTE: There is a max of 32 events
+ */
+enum {
+    TD_CREATE,
+    TD_DEATH
+};
+
+/* td_thr_state_e values */
+enum {
+    TD_THR_ANY_STATE,
+    TD_THR_UNKNOWN,
+    TD_THR_SLEEP,
+    TD_THR_ZOMBIE
+};
+
+typedef int32_t td_err_e;
+typedef uint32_t td_event_e;
+typedef uint32_t td_notify_e;
+typedef uint32_t td_thr_state_e;
+typedef pthread_t thread_t;
+
+typedef struct
+{
+    pid_t pid;
+    struct ps_prochandle *ph;
+} td_thragent_t;
+
+typedef struct
+{
+    pid_t pid;
+    pid_t tid;
+    psaddr_t th_unique;
+} td_thrhandle_t;
+
+typedef struct
+{
+    td_event_e event;
+    td_thrhandle_t const * th_p;
+    union {
+        void * data;
+    } msg;
+} td_event_msg_t;
+
+typedef struct
+{
+    uint32_t events;
+} td_thr_events_t;
+
+typedef struct
+{
+    union {
+        void * bptaddr;
+    } u;
+} td_notify_t;
+
+typedef struct
+{
+    td_thr_state_e ti_state;
+    thread_t ti_tid; // pthread's id for the thread
+    int32_t ti_lid; // the kernel's id for the thread
+} td_thrinfo_t;
+
+
+#define td_event_emptyset(set) \
+    (set)->events = 0
+
+#define td_event_fillset(set) \
+    (set)->events = 0xffffffff
+
+#define td_event_addset(set, n) \
+    (set)->events |= (1 << n)
+
+
+typedef int td_thr_iter_f(td_thrhandle_t const *, void *);
+
+
+struct ps_prochandle;
+
+#ifdef __cplusplus
+extern "C"{
+#endif
+
+extern td_err_e td_ta_new(struct ps_prochandle * proc_handle, td_thragent_t ** thread_agent);
+
+extern td_err_e td_ta_delete(td_thragent_t * ta);
+
+extern td_err_e td_ta_set_event(td_thragent_t const * agent, td_thr_events_t * event);
+
+extern td_err_e td_ta_event_addr(td_thragent_t const * agent, td_event_e event, td_notify_t * notify);
+
+extern td_err_e td_ta_clear_event(const td_thragent_t * ta_arg,
+				  td_thr_events_t * event);
+
+extern td_err_e td_ta_event_getmsg(td_thragent_t const * agent, td_event_msg_t * event);
+
+extern td_err_e td_ta_map_lwp2thr(td_thragent_t const * agent, lwpid_t lwpid,
+				  td_thrhandle_t *th);
+
+extern td_err_e td_thr_get_info(td_thrhandle_t const * handle,
+				td_thrinfo_t * info);
+
+extern td_err_e td_thr_event_enable(const td_thrhandle_t * handle,
+				    int event);
+
+extern td_err_e td_ta_thr_iter(td_thragent_t const * agent, td_thr_iter_f * func, void * cookie,
+                               td_thr_state_e state, int32_t prio, sigset_t * sigmask, uint32_t user_flags);
+
+extern char const ** td_symbol_list(void);
+
+extern td_err_e td_thr_get_info(td_thrhandle_t const * handle, td_thrinfo_t * info);
+
+extern td_err_e td_thr_tlsbase(const td_thrhandle_t*, unsigned long int, psaddr_t*);
+
+extern td_err_e td_thr_tls_get_addr(const td_thrhandle_t*, psaddr_t, size_t, psaddr_t*);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/sources/android/native_app_glue/NOTICE b/sources/android/native_app_glue/NOTICE
index da97d29..d6c0922 100644
--- a/sources/android/native_app_glue/NOTICE
+++ b/sources/android/native_app_glue/NOTICE
@@ -1,4 +1,4 @@
-Copyright (C) 2010 The Android Open Source Project
+Copyright (C) 2016 The Android Open Source Project
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
diff --git a/sources/android/native_app_glue/android_native_app_glue.c b/sources/android/native_app_glue/android_native_app_glue.c
index 1e63c5e..7eada08 100644
--- a/sources/android/native_app_glue/android_native_app_glue.c
+++ b/sources/android/native_app_glue/android_native_app_glue.c
@@ -12,17 +12,18 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
+ *
  */
 
-#include "android_native_app_glue.h"
-
 #include <jni.h>
 
 #include <errno.h>
 #include <stdlib.h>
 #include <string.h>
 #include <unistd.h>
+#include <sys/resource.h>
 
+#include "android_native_app_glue.h"
 #include <android/log.h>
 
 #define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "threaded_app", __VA_ARGS__))
@@ -47,12 +48,17 @@
 
 int8_t android_app_read_cmd(struct android_app* android_app) {
     int8_t cmd;
-    if (read(android_app->msgread, &cmd, sizeof(cmd)) != sizeof(cmd)) {
+    if (read(android_app->msgread, &cmd, sizeof(cmd)) == sizeof(cmd)) {
+        switch (cmd) {
+            case APP_CMD_SAVE_STATE:
+                free_saved_state(android_app);
+                break;
+        }
+        return cmd;
+    } else {
         LOGE("No data on command pipe!");
-        return -1;
     }
-    if (cmd == APP_CMD_SAVE_STATE) free_saved_state(android_app);
-    return cmd;
+    return -1;
 }
 
 static void print_cur_config(struct android_app* android_app) {
@@ -83,7 +89,7 @@
 void android_app_pre_exec_cmd(struct android_app* android_app, int8_t cmd) {
     switch (cmd) {
         case APP_CMD_INPUT_CHANGED:
-            LOGV("APP_CMD_INPUT_CHANGED");
+            LOGV("APP_CMD_INPUT_CHANGED\n");
             pthread_mutex_lock(&android_app->mutex);
             if (android_app->inputQueue != NULL) {
                 AInputQueue_detachLooper(android_app->inputQueue);
@@ -100,7 +106,7 @@
             break;
 
         case APP_CMD_INIT_WINDOW:
-            LOGV("APP_CMD_INIT_WINDOW");
+            LOGV("APP_CMD_INIT_WINDOW\n");
             pthread_mutex_lock(&android_app->mutex);
             android_app->window = android_app->pendingWindow;
             pthread_cond_broadcast(&android_app->cond);
@@ -108,7 +114,7 @@
             break;
 
         case APP_CMD_TERM_WINDOW:
-            LOGV("APP_CMD_TERM_WINDOW");
+            LOGV("APP_CMD_TERM_WINDOW\n");
             pthread_cond_broadcast(&android_app->cond);
             break;
 
@@ -116,7 +122,7 @@
         case APP_CMD_START:
         case APP_CMD_PAUSE:
         case APP_CMD_STOP:
-            LOGV("activityState=%d", cmd);
+            LOGV("activityState=%d\n", cmd);
             pthread_mutex_lock(&android_app->mutex);
             android_app->activityState = cmd;
             pthread_cond_broadcast(&android_app->cond);
@@ -124,14 +130,14 @@
             break;
 
         case APP_CMD_CONFIG_CHANGED:
-            LOGV("APP_CMD_CONFIG_CHANGED");
+            LOGV("APP_CMD_CONFIG_CHANGED\n");
             AConfiguration_fromAssetManager(android_app->config,
                     android_app->activity->assetManager);
             print_cur_config(android_app);
             break;
 
         case APP_CMD_DESTROY:
-            LOGV("APP_CMD_DESTROY");
+            LOGV("APP_CMD_DESTROY\n");
             android_app->destroyRequested = 1;
             break;
     }
@@ -140,7 +146,7 @@
 void android_app_post_exec_cmd(struct android_app* android_app, int8_t cmd) {
     switch (cmd) {
         case APP_CMD_TERM_WINDOW:
-            LOGV("APP_CMD_TERM_WINDOW");
+            LOGV("APP_CMD_TERM_WINDOW\n");
             pthread_mutex_lock(&android_app->mutex);
             android_app->window = NULL;
             pthread_cond_broadcast(&android_app->cond);
@@ -148,7 +154,7 @@
             break;
 
         case APP_CMD_SAVE_STATE:
-            LOGV("APP_CMD_SAVE_STATE");
+            LOGV("APP_CMD_SAVE_STATE\n");
             pthread_mutex_lock(&android_app->mutex);
             android_app->stateSaved = 1;
             pthread_cond_broadcast(&android_app->cond);
@@ -162,6 +168,7 @@
 }
 
 void app_dummy() {
+
 }
 
 static void android_app_destroy(struct android_app* android_app) {
@@ -181,7 +188,7 @@
 static void process_input(struct android_app* app, struct android_poll_source* source) {
     AInputEvent* event = NULL;
     while (AInputQueue_getEvent(app->inputQueue, &event) >= 0) {
-        LOGV("New input event: type=%d", AInputEvent_getType(event));
+        LOGV("New input event: type=%d\n", AInputEvent_getType(event));
         if (AInputQueue_preDispatchEvent(app->inputQueue, event)) {
             continue;
         }
@@ -234,8 +241,9 @@
 // --------------------------------------------------------------------
 
 static struct android_app* android_app_create(ANativeActivity* activity,
-                                              void* savedState, size_t savedStateSize) {
-    struct android_app* android_app = calloc(1, sizeof(struct android_app));
+        void* savedState, size_t savedStateSize) {
+    struct android_app* android_app = (struct android_app*)malloc(sizeof(struct android_app));
+    memset(android_app, 0, sizeof(struct android_app));
     android_app->activity = activity;
 
     pthread_mutex_init(&android_app->mutex, NULL);
@@ -255,7 +263,7 @@
     android_app->msgread = msgpipe[0];
     android_app->msgwrite = msgpipe[1];
 
-    pthread_attr_t attr;
+    pthread_attr_t attr; 
     pthread_attr_init(&attr);
     pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
     pthread_create(&android_app->thread, &attr, android_app_entry, android_app);
@@ -272,7 +280,7 @@
 
 static void android_app_write_cmd(struct android_app* android_app, int8_t cmd) {
     if (write(android_app->msgwrite, &cmd, sizeof(cmd)) != sizeof(cmd)) {
-        LOGE("Failure writing android_app cmd: %s", strerror(errno));
+        LOGE("Failure writing android_app cmd: %s\n", strerror(errno));
     }
 }
 
@@ -325,30 +333,26 @@
     free(android_app);
 }
 
-static struct android_app* ToApp(ANativeActivity* activity) {
-    return (struct android_app*) activity->instance;
-}
-
 static void onDestroy(ANativeActivity* activity) {
-    LOGV("Destroy: %p", activity);
-    android_app_free(ToApp(activity));
+    LOGV("Destroy: %p\n", activity);
+    android_app_free((struct android_app*)activity->instance);
 }
 
 static void onStart(ANativeActivity* activity) {
-    LOGV("Start: %p", activity);
-    android_app_set_activity_state(ToApp(activity), APP_CMD_START);
+    LOGV("Start: %p\n", activity);
+    android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_START);
 }
 
 static void onResume(ANativeActivity* activity) {
-    LOGV("Resume: %p", activity);
-    android_app_set_activity_state(ToApp(activity), APP_CMD_RESUME);
+    LOGV("Resume: %p\n", activity);
+    android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_RESUME);
 }
 
 static void* onSaveInstanceState(ANativeActivity* activity, size_t* outLen) {
-    LOGV("SaveInstanceState: %p", activity);
-
-    struct android_app* android_app = ToApp(activity);
+    struct android_app* android_app = (struct android_app*)activity->instance;
     void* savedState = NULL;
+
+    LOGV("SaveInstanceState: %p\n", activity);
     pthread_mutex_lock(&android_app->mutex);
     android_app->stateSaved = 0;
     android_app_write_cmd(android_app, APP_CMD_SAVE_STATE);
@@ -369,89 +373,70 @@
 }
 
 static void onPause(ANativeActivity* activity) {
-    LOGV("Pause: %p", activity);
-    android_app_set_activity_state(ToApp(activity), APP_CMD_PAUSE);
+    LOGV("Pause: %p\n", activity);
+    android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_PAUSE);
 }
 
 static void onStop(ANativeActivity* activity) {
-    LOGV("Stop: %p", activity);
-    android_app_set_activity_state(ToApp(activity), APP_CMD_STOP);
+    LOGV("Stop: %p\n", activity);
+    android_app_set_activity_state((struct android_app*)activity->instance, APP_CMD_STOP);
 }
 
 static void onConfigurationChanged(ANativeActivity* activity) {
-    LOGV("ConfigurationChanged: %p", activity);
-    android_app_write_cmd(ToApp(activity), APP_CMD_CONFIG_CHANGED);
-}
-
-static void onContentRectChanged(ANativeActivity* activity, const ARect* r) {
-    LOGV("ContentRectChanged: l=%d,t=%d,r=%d,b=%d", r->left, r->top, r->right, r->bottom);
-    struct android_app* android_app = ToApp(activity);
-    pthread_mutex_lock(&android_app->mutex);
-    android_app->contentRect = *r;
-    pthread_mutex_unlock(&android_app->mutex);
-    android_app_write_cmd(ToApp(activity), APP_CMD_CONTENT_RECT_CHANGED);
+    struct android_app* android_app = (struct android_app*)activity->instance;
+    LOGV("ConfigurationChanged: %p\n", activity);
+    android_app_write_cmd(android_app, APP_CMD_CONFIG_CHANGED);
 }
 
 static void onLowMemory(ANativeActivity* activity) {
-    LOGV("LowMemory: %p", activity);
-    android_app_write_cmd(ToApp(activity), APP_CMD_LOW_MEMORY);
+    struct android_app* android_app = (struct android_app*)activity->instance;
+    LOGV("LowMemory: %p\n", activity);
+    android_app_write_cmd(android_app, APP_CMD_LOW_MEMORY);
 }
 
 static void onWindowFocusChanged(ANativeActivity* activity, int focused) {
-    LOGV("WindowFocusChanged: %p -- %d", activity, focused);
-    android_app_write_cmd(ToApp(activity), focused ? APP_CMD_GAINED_FOCUS : APP_CMD_LOST_FOCUS);
+    LOGV("WindowFocusChanged: %p -- %d\n", activity, focused);
+    android_app_write_cmd((struct android_app*)activity->instance,
+            focused ? APP_CMD_GAINED_FOCUS : APP_CMD_LOST_FOCUS);
 }
 
 static void onNativeWindowCreated(ANativeActivity* activity, ANativeWindow* window) {
-    LOGV("NativeWindowCreated: %p -- %p", activity, window);
-    android_app_set_window(ToApp(activity), window);
+    LOGV("NativeWindowCreated: %p -- %p\n", activity, window);
+    android_app_set_window((struct android_app*)activity->instance, window);
 }
 
 static void onNativeWindowDestroyed(ANativeActivity* activity, ANativeWindow* window) {
-    LOGV("NativeWindowDestroyed: %p -- %p", activity, window);
-    android_app_set_window(ToApp(activity), NULL);
-}
-
-static void onNativeWindowRedrawNeeded(ANativeActivity* activity, ANativeWindow* window) {
-    LOGV("NativeWindowRedrawNeeded: %p -- %p", activity, window);
-    android_app_write_cmd(ToApp(activity), APP_CMD_WINDOW_REDRAW_NEEDED);
-}
-
-static void onNativeWindowResized(ANativeActivity* activity, ANativeWindow* window) {
-    LOGV("NativeWindowResized: %p -- %p", activity, window);
-    android_app_write_cmd(ToApp(activity), APP_CMD_WINDOW_RESIZED);
+    LOGV("NativeWindowDestroyed: %p -- %p\n", activity, window);
+    android_app_set_window((struct android_app*)activity->instance, NULL);
 }
 
 static void onInputQueueCreated(ANativeActivity* activity, AInputQueue* queue) {
-    LOGV("InputQueueCreated: %p -- %p", activity, queue);
-    android_app_set_input(ToApp(activity), queue);
+    LOGV("InputQueueCreated: %p -- %p\n", activity, queue);
+    android_app_set_input((struct android_app*)activity->instance, queue);
 }
 
 static void onInputQueueDestroyed(ANativeActivity* activity, AInputQueue* queue) {
-    LOGV("InputQueueDestroyed: %p -- %p", activity, queue);
-    android_app_set_input(ToApp(activity), NULL);
+    LOGV("InputQueueDestroyed: %p -- %p\n", activity, queue);
+    android_app_set_input((struct android_app*)activity->instance, NULL);
 }
 
 JNIEXPORT
-void ANativeActivity_onCreate(ANativeActivity* activity, void* savedState, size_t savedStateSize) {
-    LOGV("Creating: %p", activity);
-
-    activity->callbacks->onConfigurationChanged = onConfigurationChanged;
-    activity->callbacks->onContentRectChanged = onContentRectChanged;
+void ANativeActivity_onCreate(ANativeActivity* activity, void* savedState,
+                              size_t savedStateSize) {
+    LOGV("Creating: %p\n", activity);
     activity->callbacks->onDestroy = onDestroy;
-    activity->callbacks->onInputQueueCreated = onInputQueueCreated;
-    activity->callbacks->onInputQueueDestroyed = onInputQueueDestroyed;
-    activity->callbacks->onLowMemory = onLowMemory;
-    activity->callbacks->onNativeWindowCreated = onNativeWindowCreated;
-    activity->callbacks->onNativeWindowDestroyed = onNativeWindowDestroyed;
-    activity->callbacks->onNativeWindowRedrawNeeded = onNativeWindowRedrawNeeded;
-    activity->callbacks->onNativeWindowResized = onNativeWindowResized;
-    activity->callbacks->onPause = onPause;
+    activity->callbacks->onStart = onStart;
     activity->callbacks->onResume = onResume;
     activity->callbacks->onSaveInstanceState = onSaveInstanceState;
-    activity->callbacks->onStart = onStart;
+    activity->callbacks->onPause = onPause;
     activity->callbacks->onStop = onStop;
+    activity->callbacks->onConfigurationChanged = onConfigurationChanged;
+    activity->callbacks->onLowMemory = onLowMemory;
     activity->callbacks->onWindowFocusChanged = onWindowFocusChanged;
+    activity->callbacks->onNativeWindowCreated = onNativeWindowCreated;
+    activity->callbacks->onNativeWindowDestroyed = onNativeWindowDestroyed;
+    activity->callbacks->onInputQueueCreated = onInputQueueCreated;
+    activity->callbacks->onInputQueueDestroyed = onInputQueueDestroyed;
 
     activity->instance = android_app_create(activity, savedState, savedStateSize);
 }
diff --git a/sources/android/native_app_glue/android_native_app_glue.h b/sources/android/native_app_glue/android_native_app_glue.h
index 35a786e..c99d6e1 100644
--- a/sources/android/native_app_glue/android_native_app_glue.h
+++ b/sources/android/native_app_glue/android_native_app_glue.h
@@ -12,9 +12,11 @@
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
+ *
  */
 
-#pragma once
+#ifndef _ANDROID_NATIVE_APP_GLUE_H
+#define _ANDROID_NATIVE_APP_GLUE_H
 
 #include <poll.h>
 #include <pthread.h>
@@ -47,7 +49,7 @@
  *
  * 2/ android_main() receives a pointer to a valid "android_app" structure
  *    that contains references to other important objects, e.g. the
- *    ANativeActivity object instance the application is running in.
+ *    ANativeActivity obejct instance the application is running in.
  *
  * 3/ the "android_app" object holds an ALooper instance that already
  *    listens to two important things:
@@ -330,7 +332,7 @@
 void android_app_post_exec_cmd(struct android_app* android_app, int8_t cmd);
 
 /**
- * No-op function that used to be used to prevent the linker from stripping app
+ * Dummy function that used to be used to prevent the linker from stripping app
  * glue code. No longer necessary, since __attribute__((visibility("default")))
  * does this for us.
  */
@@ -348,3 +350,5 @@
 #ifdef __cplusplus
 }
 #endif
+
+#endif /* _ANDROID_NATIVE_APP_GLUE_H */
diff --git a/sources/android/ndk_helper/GLContext.h b/sources/android/ndk_helper/GLContext.h
index 8862aa4..cc0db46 100644
--- a/sources/android/ndk_helper/GLContext.h
+++ b/sources/android/ndk_helper/GLContext.h
@@ -14,7 +14,11 @@
  * limitations under the License.
  */
 
-#pragma once
+//--------------------------------------------------------------------------------
+// GLContext.h
+//--------------------------------------------------------------------------------
+#ifndef GLCONTEXT_H_
+#define GLCONTEXT_H_
 
 #include <EGL/egl.h>
 #include <GLES2/gl2.h>
@@ -117,3 +121,5 @@
 };
 
 }   //namespace ndkHelper
+
+#endif /* GLCONTEXT_H_ */
diff --git a/sources/android/ndk_helper/NDKHelper.h b/sources/android/ndk_helper/NDKHelper.h
index c170b5b..de2a10e 100644
--- a/sources/android/ndk_helper/NDKHelper.h
+++ b/sources/android/ndk_helper/NDKHelper.h
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#pragma once
+#ifndef _NDKSUPPORT_H
+#define _NDKSUPPORT_H
 
 /******************************************************************
  * NDK support helpers
@@ -35,3 +36,4 @@
 #include "gestureDetector.h"    //Tap/Doubletap/Pinch detector
 #include "perfMonitor.h"        //FPS counter
 #include "interpolator.h"       //Interpolator
+#endif
diff --git a/sources/android/ndk_helper/gestureDetector.h b/sources/android/ndk_helper/gestureDetector.h
index 6c8683b..6ae5d80 100644
--- a/sources/android/ndk_helper/gestureDetector.h
+++ b/sources/android/ndk_helper/gestureDetector.h
@@ -14,7 +14,11 @@
  * limitations under the License.
  */
 
-#pragma once
+//--------------------------------------------------------------------------------
+// gestureDetector.h
+//--------------------------------------------------------------------------------
+#ifndef GESTUREDETECTOR_H_
+#define GESTUREDETECTOR_H_
 
 #include <vector>
 
@@ -159,3 +163,4 @@
 };
 
 }   //namespace ndkHelper
+#endif /* GESTUREDETECTOR_H_ */
diff --git a/sources/android/ndk_helper/gl3stub.h b/sources/android/ndk_helper/gl3stub.h
index 2b8df9c..c8960df 100644
--- a/sources/android/ndk_helper/gl3stub.h
+++ b/sources/android/ndk_helper/gl3stub.h
@@ -1,3 +1,6 @@
+#ifndef __gl3_h_
+#define __gl3_h_
+
 /*
  * stub gl3.h for dynamic loading, based on:
  * gl3.h last updated on $Date: 2013-02-12 14:37:24 -0800 (Tue, 12 Feb 2013) $
@@ -10,8 +13,6 @@
  * - Added gl3stubInit() declaration
  */
 
-#pragma once
-
 #include <GLES2/gl2.h>
 #include <android/api-level.h>
 
@@ -499,3 +500,5 @@
 #ifdef __cplusplus
 }
 #endif
+
+#endif
diff --git a/sources/android/ndk_helper/interpolator.h b/sources/android/ndk_helper/interpolator.h
index 61b8d8f..1226e33 100644
--- a/sources/android/ndk_helper/interpolator.h
+++ b/sources/android/ndk_helper/interpolator.h
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#pragma once
+#ifndef INTERPOLATOR_H_
+#define INTERPOLATOR_H_
 
 #include <jni.h>
 #include <errno.h>
@@ -85,3 +86,4 @@
 };
 
 }   //namespace ndkHelper
+#endif /* INTERPOLATOR_H_ */
diff --git a/sources/android/ndk_helper/perfMonitor.h b/sources/android/ndk_helper/perfMonitor.h
index b886d6d..f418c43 100644
--- a/sources/android/ndk_helper/perfMonitor.h
+++ b/sources/android/ndk_helper/perfMonitor.h
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#pragma once
+#ifndef PERFMONITOR_H_
+#define PERFMONITOR_H_
 
 #include <jni.h>
 #include <errno.h>
@@ -57,3 +58,4 @@
 };
 
 }   //namespace ndkHelper
+#endif /* PERFMONITOR_H_ */
diff --git a/sources/android/ndk_helper/shader.h b/sources/android/ndk_helper/shader.h
index 8a21a11..d362a53 100644
--- a/sources/android/ndk_helper/shader.h
+++ b/sources/android/ndk_helper/shader.h
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#pragma once
+#ifndef SHADER_H_
+#define SHADER_H_
 
 #include <jni.h>
 
@@ -120,3 +121,4 @@
 } //namespace shader
 
 } //namespace ndkHelper
+#endif /* SHADER_H_ */
diff --git a/sources/android/ndk_helper/tapCamera.h b/sources/android/ndk_helper/tapCamera.h
index 92f282e..7d124e8 100644
--- a/sources/android/ndk_helper/tapCamera.h
+++ b/sources/android/ndk_helper/tapCamera.h
@@ -15,7 +15,6 @@
  */
 
 #pragma once
-
 #include <vector>
 #include <string>
 #include <GLES2/gl2.h>
diff --git a/sources/android/ndk_helper/vecmath.h b/sources/android/ndk_helper/vecmath.h
index a81e4f8..0692e3d 100644
--- a/sources/android/ndk_helper/vecmath.h
+++ b/sources/android/ndk_helper/vecmath.h
@@ -14,7 +14,8 @@
  * limitations under the License.
  */
 
-#pragma once
+#ifndef VECMATH_H_
+#define VECMATH_H_
 
 #include <math.h>
 #include "JNIHelper.h"
@@ -1112,3 +1113,4 @@
 };
 
 } //namespace ndk_helper
+#endif /* VECMATH_H_ */
diff --git a/sources/android/renderscript/Android.mk b/sources/android/renderscript/Android.mk
new file mode 100644
index 0000000..9cc32a2
--- /dev/null
+++ b/sources/android/renderscript/Android.mk
@@ -0,0 +1,25 @@
+LOCAL_PATH:= $(RENDERSCRIPT_TOOLCHAIN_PREBUILT_ROOT)/platform
+
+# Prebuilt libRSSupport.so
+include $(CLEAR_VARS)
+LOCAL_MODULE:= RSSupport
+LOCAL_SRC_FILES:= $(TARGET_ARCH)/lib$(LOCAL_MODULE)$(TARGET_SONAME_EXTENSION)
+include $(PREBUILT_SHARED_LIBRARY)
+
+# Prebuilt libRSSupportIO.so
+include $(CLEAR_VARS)
+LOCAL_MODULE:= RSSupportIO
+LOCAL_SRC_FILES:= $(TARGET_ARCH)/lib$(LOCAL_MODULE)$(TARGET_SONAME_EXTENSION)
+include $(PREBUILT_SHARED_LIBRARY)
+
+# Prebuilt libblasV8.so
+include $(CLEAR_VARS)
+LOCAL_MODULE:= blasV8
+LOCAL_SRC_FILES:= $(TARGET_ARCH)/lib$(LOCAL_MODULE)$(TARGET_SONAME_EXTENSION)
+include $(PREBUILT_SHARED_LIBRARY)
+
+# Prebuilt libRScpp_static.a
+include $(CLEAR_VARS)
+LOCAL_MODULE:= RScpp_static
+LOCAL_SRC_FILES:= $(TARGET_ARCH)/lib$(LOCAL_MODULE)$(TARGET_LIB_EXTENSION)
+include $(PREBUILT_STATIC_LIBRARY)
\ No newline at end of file
diff --git a/sources/android/renderscript/NOTICE b/sources/android/renderscript/NOTICE
new file mode 100644
index 0000000..d6c0922
--- /dev/null
+++ b/sources/android/renderscript/NOTICE
@@ -0,0 +1,13 @@
+Copyright (C) 2016 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/sources/android/support/Android.mk b/sources/android/support/Android.mk
new file mode 100644
index 0000000..d4728a5
--- /dev/null
+++ b/sources/android/support/Android.mk
@@ -0,0 +1,130 @@
+LOCAL_PATH := $(call my-dir)
+
+# libandroid_support is only needed on LP32.
+ifeq ($(filter $(NDK_KNOWN_DEVICE_ABI64S),$(TARGET_ARCH_ABI)),)
+
+ifneq ($(LIBCXX_FORCE_REBUILD),true) # Using prebuilt
+
+LIBCXX_LIBS := ../../cxx-stl/llvm-libc++/libs/$(TARGET_ARCH_ABI)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := android_support
+LOCAL_SRC_FILES := $(LIBCXX_LIBS)/lib$(LOCAL_MODULE)$(TARGET_LIB_EXTENSION)
+include $(PREBUILT_STATIC_LIBRARY)
+
+else # Building
+
+android_support_cflags := \
+    -Drestrict=__restrict__ \
+    -ffunction-sections \
+    -fdata-sections \
+    -fvisibility=hidden \
+
+android_support_c_includes := \
+    $(BIONIC_PATH)/libc \
+    $(BIONIC_PATH)/libc/upstream-openbsd/android/include \
+    $(BIONIC_PATH)/libm \
+    $(BIONIC_PATH)/libm/upstream-freebsd/android/include \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src \
+
+android_support_cflags += \
+    -include freebsd-compat.h \
+    -include openbsd-compat.h \
+    -include $(LOCAL_PATH)/src/support_preinclude.h \
+    -D__BIONIC_BUILD_FOR_ANDROID_SUPPORT \
+    -Werror \
+
+android_support_sources := \
+    $(BIONIC_PATH)/libc/bionic/c32rtomb.cpp \
+    $(BIONIC_PATH)/libc/bionic/locale.cpp \
+    $(BIONIC_PATH)/libc/bionic/mbrtoc32.cpp \
+    $(BIONIC_PATH)/libc/bionic/wchar.cpp \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcscat.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcschr.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcslen.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcsncmp.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcsncpy.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcspbrk.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcsrchr.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcsspn.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcsstr.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wcstok.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wmemchr.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wmemcmp.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wmemcpy.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wmemmove.c \
+    $(BIONIC_PATH)/libc/upstream-freebsd/lib/libc/string/wmemset.c \
+    $(BIONIC_PATH)/libc/upstream-openbsd/lib/libc/locale/mbtowc.c \
+    $(BIONIC_PATH)/libc/upstream-openbsd/lib/libc/stdlib/imaxabs.c \
+    $(BIONIC_PATH)/libc/upstream-openbsd/lib/libc/stdlib/imaxdiv.c \
+    $(BIONIC_PATH)/libm/digittoint.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_acos.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_acosh.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_asin.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_atan2.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_atanh.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_cosh.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_exp.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_hypot.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_log.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_log10.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_log2.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_log2f.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_logf.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_remainder.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_sinh.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/e_sqrt.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/k_cos.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/k_exp.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/k_rem_pio2.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/k_sin.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/k_tan.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_asinh.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_atan.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_cbrt.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_cos.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_erf.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_exp2.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_expm1.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_frexp.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_frexpf.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_log1p.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_logb.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_nextafter.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_remquo.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_rint.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_sin.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_tan.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_tanh.c \
+    src/locale_support.cpp \
+    src/posix_memalign.cpp \
+    src/swprintf.cpp \
+    src/wcstox.cpp \
+
+ifeq (x86,$(TARGET_ARCH_ABI))
+# Replaces broken implementations in x86 libm.so
+android_support_sources += \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_scalbln.c \
+    $(BIONIC_PATH)/libm/upstream-freebsd/lib/msun/src/s_scalbn.c \
+
+# fake_long_double.c doesn't define this for x86.
+# TODO: seems like we don't pass .S files to the assembler?
+#android_support_c_includes += $(BIONIC_PATH)/libc/arch-x86/include
+#android_support_sources += $(BIONIC_PATH)/libm/x86/lrint.S
+endif
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := android_support
+LOCAL_SRC_FILES := $(android_support_sources)
+LOCAL_C_INCLUDES := $(android_support_c_includes)
+LOCAL_CFLAGS := $(android_support_cflags)
+
+LOCAL_CPPFLAGS := \
+    -fvisibility-inlines-hidden \
+    -std=c++11 \
+
+include $(BUILD_STATIC_LIBRARY)
+
+endif # Prebuilt/building
+
+endif # LP32
diff --git a/sources/android/support/NOTICE b/sources/android/support/NOTICE
new file mode 100644
index 0000000..82ed67f
--- /dev/null
+++ b/sources/android/support/NOTICE
@@ -0,0 +1,482 @@
+====================================================
+Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
+
+Developed at SunPro, a Sun Microsystems, Inc. business.
+Permission to use, copy, modify, and distribute this
+software is freely granted, provided that this notice
+is preserved.
+
+-------------------------------------------------------------------
+
+====================================================
+Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
+
+Developed at SunPro, a Sun Microsystems, Inc. business.
+Permission to use, copy, modify, and distribute this
+software is freely granted, provided that this notice
+is preserved.
+====================================================
+
+Optimized by Bruce D. Evans.
+
+-------------------------------------------------------------------
+
+====================================================
+Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
+
+Developed at SunSoft, a Sun Microsystems, Inc. business.
+Permission to use, copy, modify, and distribute this
+software is freely granted, provided that this notice
+is preserved.
+
+-------------------------------------------------------------------
+
+====================================================
+Copyright (C) 2004 by Sun Microsystems, Inc. All rights reserved.
+
+Permission to use, copy, modify, and distribute this
+software is freely granted, provided that this notice
+is preserved.
+
+-------------------------------------------------------------------
+
+====================================================
+Copyright 2004 Sun Microsystems, Inc.  All Rights Reserved.
+
+Permission to use, copy, modify, and distribute this
+software is freely granted, provided that this notice
+is preserved.
+
+-------------------------------------------------------------------
+
+Copyright (C) 2008 The Android Open Source Project
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+ * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in
+   the documentation and/or other materials provided with the
+   distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (C) 2013 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+-------------------------------------------------------------------
+
+Copyright (C) 2013 The Android Open Source Project
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+ * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in
+   the documentation and/or other materials provided with the
+   distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (C) 2014 The Android Open Source Project
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+ * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in
+   the documentation and/or other materials provided with the
+   distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (C) 2017 The Android Open Source Project
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+ * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in
+   the documentation and/or other materials provided with the
+   distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 1989, 1993
+   The Regents of the University of California.  All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+3. Neither the name of the University nor the names of its contributors
+   may be used to endorse or promote products derived from this software
+   without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 1990, 1993
+   The Regents of the University of California.  All rights reserved.
+
+This code is derived from software contributed to Berkeley by
+Chris Torek.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+3. Neither the name of the University nor the names of its contributors
+   may be used to endorse or promote products derived from this software
+   without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 1998 Softweyr LLC.  All rights reserved.
+
+strtok_r, from Berkeley strtok
+Oct 13, 1998 by Wes Peters <wes@softweyr.com>
+
+Copyright (c) 1988, 1993
+   The Regents of the University of California.  All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notices, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notices, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+3. Neither the name of the University nor the names of its contributors
+   may be used to endorse or promote products derived from this software
+   without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY SOFTWEYR LLC, THE REGENTS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL SOFTWEYR LLC, THE
+REGENTS, OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 2002 Tim J. Robbins
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 2002-2004 Tim J. Robbins
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 2002-2004 Tim J. Robbins.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 2004 David Schultz <das@FreeBSD.ORG>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 2005 David Schultz <das@FreeBSD.ORG>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 2007 David Schultz
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c) 2011 David Schultz <das@FreeBSD.ORG>
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
+Copyright (c)1999 Citrus Project,
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+1. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
+
+-------------------------------------------------------------------
+
diff --git a/sources/android/support/include/inttypes.h b/sources/android/support/include/inttypes.h
new file mode 100644
index 0000000..00887bc
--- /dev/null
+++ b/sources/android/support/include/inttypes.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef NDK_ANDROID_SUPPORT_INTTYPES_H
+#define NDK_ANDROID_SUPPORT_INTTYPES_H
+
+#include_next <inttypes.h>
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ < __ANDROID_API_K__
+intmax_t imaxabs(intmax_t) __attribute_const__;
+imaxdiv_t imaxdiv(intmax_t, intmax_t) __attribute_const__;
+#endif
+
+#if __ANDROID_API__ < __ANDROID_API_L__
+intmax_t wcstoimax(const wchar_t* __restrict, wchar_t** __restrict, int);
+uintmax_t wcstoumax(const wchar_t* __restrict, wchar_t** __restrict, int);
+#endif
+
+__END_DECLS
+
+#endif
diff --git a/sources/android/support/include/locale.h b/sources/android/support/include/locale.h
new file mode 100644
index 0000000..72f847b
--- /dev/null
+++ b/sources/android/support/include/locale.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef NDK_ANDROID_SUPPORT_LOCALE_H
+#define NDK_ANDROID_SUPPORT_LOCALE_H
+
+#include_next <locale.h>
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ < __ANDROID_API_L__
+struct lconv* localeconv(void);
+locale_t duplocale(locale_t);
+void freelocale(locale_t);
+locale_t newlocale(int, const char*, locale_t);
+locale_t uselocale(locale_t);
+#endif
+
+__END_DECLS
+
+#endif
diff --git a/sources/android/support/include/math.h b/sources/android/support/include/math.h
new file mode 100644
index 0000000..11d50cc
--- /dev/null
+++ b/sources/android/support/include/math.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef NDK_ANDROID_SUPPORT_MATH_H
+#define NDK_ANDROID_SUPPORT_MATH_H
+
+#include_next <math.h>
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ < __ANDROID_API_J_MR2__
+double        log2(double);
+float         log2f(float);
+long double   log2l(long double);
+long double   logbl(long double);
+float         tgammaf(float);
+#endif
+
+#if __ANDROID_API__ < __ANDROID_API_L__
+long double   acoshl(long double);
+long double   acosl(long double);
+long double   asinhl(long double);
+long double   asinl(long double);
+long double   atan2l(long double, long double);
+long double   atanhl(long double);
+long double   atanl(long double);
+long double   cbrtl(long double);
+long double   coshl(long double);
+long double   cosl(long double);
+long double   erfcl(long double);
+long double   erfl(long double);
+long double   exp2l(long double);
+long double   expl(long double);
+long double   expm1l(long double);
+long double   fmodl(long double, long double);
+long double   hypotl(long double, long double);
+long double   lgammal(long double);
+long double   log10l(long double);
+long double   log1pl(long double);
+long double   logl(long double);
+long double   modfl(long double, long double*);
+long double   nearbyintl(long double);
+long double   powl(long double, long double);
+long double   remainderl(long double, long double);
+long double   remquol(long double, long double, int*);
+long double   rintl(long double);
+long double   sinhl(long double);
+long double   sinl(long double);
+long double   sqrtl(long double);
+long double   tanhl(long double);
+long double   tanl(long double);
+long double   tgammal(long double);
+long int      lrintl(long double);
+long long int llrintl(long double);
+#endif
+
+__END_DECLS
+
+#endif
diff --git a/sources/android/support/include/stdlib.h b/sources/android/support/include/stdlib.h
new file mode 100644
index 0000000..93e384d
--- /dev/null
+++ b/sources/android/support/include/stdlib.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef NDK_ANDROID_SUPPORT_STDLIB_H
+#define NDK_ANDROID_SUPPORT_STDLIB_H
+
+#include_next <stdlib.h>
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ < __ANDROID_API_J_MR1__
+int posix_memalign(void** memptr, size_t alignment, size_t size);
+#endif
+
+#if __ANDROID_API__ < __ANDROID_API_L__
+long double strtold_l(const char*, char**, locale_t);
+long long strtoll_l(const char*, char**, int, locale_t);
+unsigned long long strtoull_l(const char*, char**, int, locale_t);
+int mbtowc(wchar_t*, const char*, size_t);
+int at_quick_exit(void (*)(void));
+void quick_exit(int) __noreturn;
+#endif
+
+__END_DECLS
+
+#endif
diff --git a/sources/android/support/include/uchar.h b/sources/android/support/include/uchar.h
new file mode 100644
index 0000000..42942c4
--- /dev/null
+++ b/sources/android/support/include/uchar.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef NDK_ANDROID_SUPPORT_UCHAR_H
+#define NDK_ANDROID_SUPPORT_UCHAR_H
+
+#include_next <uchar.h>
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ < __ANDROID_API_L__
+size_t c16rtomb(char* __restrict, char16_t, mbstate_t* __restrict);
+size_t c32rtomb(char* __restrict, char32_t, mbstate_t* __restrict);
+size_t mbrtoc16(char16_t* __restrict, const char* __restrict, size_t, mbstate_t* __restrict);
+size_t mbrtoc32(char32_t* __restrict, const char* __restrict, size_t, mbstate_t* __restrict);
+#endif
+
+__END_DECLS
+
+#endif
diff --git a/sources/android/support/include/wchar.h b/sources/android/support/include/wchar.h
new file mode 100644
index 0000000..9152cfe
--- /dev/null
+++ b/sources/android/support/include/wchar.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef NDK_ANDROID_SUPPORT_WCHAR_H
+#define NDK_ANDROID_SUPPORT_WCHAR_H
+
+#include_next <wchar.h>
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ < __ANDROID_API_L__
+int vfwscanf(FILE*, const wchar_t*, va_list);
+int vswscanf(const wchar_t*, const wchar_t*, va_list);
+int vwscanf(const wchar_t*, va_list);
+size_t mbsnrtowcs(wchar_t*, const char**, size_t, size_t, mbstate_t*);
+size_t wcsnrtombs(char*, const wchar_t**, size_t, size_t, mbstate_t*);
+float wcstof(const wchar_t*, wchar_t**);
+long long wcstoll(const wchar_t*, wchar_t**, int);
+unsigned long long wcstoull(const wchar_t*, wchar_t**, int);
+#endif
+
+__END_DECLS
+
+#endif
diff --git a/sources/android/support/include/wctype.h b/sources/android/support/include/wctype.h
new file mode 100644
index 0000000..31d015d
--- /dev/null
+++ b/sources/android/support/include/wctype.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef NDK_ANDROID_SUPPORT_WCTYPE_H
+#define NDK_ANDROID_SUPPORT_WCTYPE_H
+
+#include_next <wctype.h>
+
+__BEGIN_DECLS
+
+#if __ANDROID_API__ < __ANDROID_API_L__
+#include <ctype.h>
+static __inline int iswblank(wint_t ch) { return isblank(ch); }
+#endif
+
+__END_DECLS
+
+#endif
diff --git a/sources/android/support/regenerate-NOTICE.sh b/sources/android/support/regenerate-NOTICE.sh
new file mode 100755
index 0000000..0d659c3
--- /dev/null
+++ b/sources/android/support/regenerate-NOTICE.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+# Change into the libandroid_support directory.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+cd $DIR
+
+# Pull the notices from the files in libandroid_support itself (via `.`),
+# plus all the bionic files we pull in.
+sed '/$(BIONIC_PATH).*\.c/ { s| *$(BIONIC_PATH)|../../../../bionic/| ; s| *\\$|| ; p } ; d' Android.mk | \
+    xargs ../../../../bionic/libc/tools/generate-NOTICE.py . > NOTICE
+
+# Show the caller what we've done.
+git diff --exit-code HEAD ./NOTICE
+exit $?
diff --git a/sources/android/support/src/UniquePtr.h b/sources/android/support/src/UniquePtr.h
new file mode 100644
index 0000000..7479bcf
--- /dev/null
+++ b/sources/android/support/src/UniquePtr.h
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+#ifndef ANDROID_SUPPORT_UNIQUE_PTR_H
+#define ANDROID_SUPPORT_UNIQUE_PTR_H
+
+namespace {
+
+#define DISALLOW_COPY_AND_ASSIGN(TypeName) \
+  TypeName(const TypeName&) = delete;      \
+  void operator=(const TypeName&) = delete
+
+template <typename T>
+struct DefaultDelete {
+  enum { type_must_be_complete = sizeof(T) };
+  DefaultDelete() {
+  }
+  void operator()(T* p) const {
+    delete p;
+  }
+};
+
+template <typename T>
+struct DefaultDelete<T[]> {
+  enum { type_must_be_complete = sizeof(T) };
+  void operator()(T* p) const {
+    delete[] p;
+  }
+};
+
+template <typename T, typename D = DefaultDelete<T> >
+class UniquePtr {
+ public:
+  explicit UniquePtr(T* ptr = NULL) : mPtr(ptr) {
+  }
+
+  ~UniquePtr() {
+    reset();
+  }
+
+  T& operator*() const {
+    return *mPtr;
+  }
+  T* operator->() const {
+    return mPtr;
+  }
+  T* get() const {
+    return mPtr;
+  }
+
+  T* release() __attribute__((warn_unused_result)) {
+    T* result = mPtr;
+    mPtr = NULL;
+    return result;
+  }
+
+  void reset(T* ptr = NULL) {
+    if (ptr != mPtr) {
+      D()(mPtr);
+      mPtr = ptr;
+    }
+  }
+
+ private:
+  T* mPtr;
+
+  template <typename T2>
+  bool operator==(const UniquePtr<T2>& p) const;
+  template <typename T2>
+  bool operator!=(const UniquePtr<T2>& p) const;
+
+  DISALLOW_COPY_AND_ASSIGN(UniquePtr);
+};
+
+// Partial specialization for array types. Like std::unique_ptr, this removes
+// operator* and operator-> but adds operator[].
+template <typename T, typename D>
+class UniquePtr<T[], D> {
+ public:
+  explicit UniquePtr(T* ptr = NULL) : mPtr(ptr) {
+  }
+
+  ~UniquePtr() {
+    reset();
+  }
+
+  T& operator[](size_t i) const {
+    return mPtr[i];
+  }
+  T* get() const {
+    return mPtr;
+  }
+
+  T* release() __attribute__((warn_unused_result)) {
+    T* result = mPtr;
+    mPtr = NULL;
+    return result;
+  }
+
+  void reset(T* ptr = NULL) {
+    if (ptr != mPtr) {
+      D()(mPtr);
+      mPtr = ptr;
+    }
+  }
+
+ private:
+  T* mPtr;
+
+  DISALLOW_COPY_AND_ASSIGN(UniquePtr);
+};
+
+} // anonymous namespace
+
+#endif  /* ANDROID_SUPPORT_UNIQUE_PTR_H */
diff --git a/sources/android/support/src/locale_support.cpp b/sources/android/support/src/locale_support.cpp
new file mode 100644
index 0000000..a730fc6
--- /dev/null
+++ b/sources/android/support/src/locale_support.cpp
@@ -0,0 +1,16 @@
+#include <stdlib.h>
+#include <xlocale.h>
+
+long long strtoll_l(const char* nptr, char** endptr, int base, locale_t loc) {
+  return strtoll(nptr, endptr, base);
+}
+
+unsigned long long strtoull_l(const char* nptr, char** endptr, int base,
+                              locale_t loc) {
+  return strtoull(nptr, endptr, base);
+}
+
+long double strtold_l(const char* nptr, char** endptr,
+                      locale_t __unused locale) {
+  return strtold(nptr, endptr);
+}
diff --git a/sources/android/support/src/posix_memalign.cpp b/sources/android/support/src/posix_memalign.cpp
new file mode 100644
index 0000000..cf7abbb
--- /dev/null
+++ b/sources/android/support/src/posix_memalign.cpp
@@ -0,0 +1,20 @@
+#include <errno.h>
+#include <malloc.h>
+#include <stdlib.h>
+
+int posix_memalign(void** memptr, size_t alignment, size_t size) {
+  if ((alignment & (alignment - 1)) != 0 || alignment == 0) {
+    return EINVAL;
+  }
+
+  if (alignment % sizeof(void*) != 0) {
+    return EINVAL;
+  }
+
+  *memptr = memalign(alignment, size);
+  if (*memptr == NULL) {
+    return errno;
+  }
+
+  return 0;
+}
diff --git a/sources/android/support/src/support_preinclude.h b/sources/android/support/src/support_preinclude.h
new file mode 100644
index 0000000..bf090b0
--- /dev/null
+++ b/sources/android/support/src/support_preinclude.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#pragma once
+
+// Declare cimag and creal for use by __ldexp_cexp. libandroid_runtime doesn't
+// really need __ldexp_cexp, but it does need __ldexp_exp, and both functions
+// are defined in the same C file. complex.h doesn't declare cimag/creal when
+// building libandroid_support, because the functions are only available
+// starting with M, and libandroid_support is compiled for the oldest supported
+// NDK API.
+//
+// The two functions are trivial (and have __builtin_{cimag,creal}
+// equivalents). Clang inlines calls to these functions even with -O0.
+double cimag(double _Complex z);
+double creal(double _Complex z);
diff --git a/sources/android/support/src/swprintf.cpp b/sources/android/support/src/swprintf.cpp
new file mode 100644
index 0000000..01cfe5d
--- /dev/null
+++ b/sources/android/support/src/swprintf.cpp
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <stdio.h>
+#include <wchar.h>
+
+#include "UniquePtr.h"
+
+namespace {
+const size_t MBS_FAILURE = static_cast<size_t>(-1);
+}
+
+int swprintf(wchar_t* wcs, size_t maxlen, const wchar_t* format, ...) {
+  va_list ap;
+  va_start(ap, format);
+  int result = vswprintf(wcs, maxlen, format, ap);
+  va_end(ap);
+  return result;
+}
+
+int vswprintf(wchar_t* wcs, size_t maxlen, const wchar_t* fmt, va_list ap) {
+  mbstate_t mbstate;
+  memset(&mbstate, 0, sizeof(mbstate));
+
+  // At most, each wide character (UTF-32) can be expanded to four narrow
+  // characters (UTF-8).
+  const size_t max_mb_len = maxlen * 4;
+  const size_t mb_fmt_len = wcslen(fmt) * 4 + 1;
+  UniquePtr<char[]> mbfmt(new char[mb_fmt_len]);
+  if (wcsrtombs(mbfmt.get(), &fmt, mb_fmt_len, &mbstate) == MBS_FAILURE) {
+    return -1;
+  }
+
+  UniquePtr<char[]> mbs(new char[max_mb_len]);
+  int nprinted = vsnprintf(mbs.get(), max_mb_len, mbfmt.get(), ap);
+  if (nprinted == -1) {
+    return -1;
+  }
+
+  const char* mbsp = mbs.get();
+  if (mbsrtowcs(wcs, &mbsp, maxlen, &mbstate) == MBS_FAILURE) {
+    return -1;
+  }
+
+  // Can't use return value from vsnprintf because that number is in narrow
+  // characters, not wide characters.
+  int result = wcslen(wcs);
+
+  // swprintf differs from snprintf in that it returns -1 if the output was
+  // truncated.
+  //
+  // Truncation can occur in two places:
+  // 1) vsnprintf truncated, in which case the return value is greater than the
+  //    length we passed.
+  // 2) Since the char buffer we pass to vsnprintf might be oversized, that
+  //    might not truncate while mbsrtowcs will. In this case, mbsp will point
+  //    to the next unconverted character instead of nullptr.
+  if (nprinted >= max_mb_len || mbsp != nullptr) {
+    return -1;
+  }
+
+  return result;
+}
diff --git a/sources/android/support/src/wcstox.cpp b/sources/android/support/src/wcstox.cpp
new file mode 100644
index 0000000..c799b3f
--- /dev/null
+++ b/sources/android/support/src/wcstox.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <inttypes.h>
+#include <stdlib.h>
+#include <wchar.h>
+
+#include "UniquePtr.h"
+
+namespace {
+constexpr size_t MBS_FAILURE = static_cast<size_t>(-1);
+}
+
+template <typename T>
+static T wcstox(T (*func)(const char*, char**, int), const wchar_t* wcs,
+                wchar_t** wcs_end, int base) {
+  mbstate_t mbstate;
+  memset(&mbstate, 0, sizeof(mbstate));
+
+  if (wcs_end != nullptr) {
+    *wcs_end = const_cast<wchar_t*>(wcs);
+  }
+
+  const size_t max_mb_len = wcslen(wcs) * 4 + 1;
+  UniquePtr<char[]> mbs(new char[max_mb_len]);
+  const wchar_t* s = wcs;
+  if (wcsrtombs(mbs.get(), &s, max_mb_len, &mbstate) == MBS_FAILURE) {
+    return static_cast<T>(0);
+  }
+
+  char* mbs_end;
+  T value = func(mbs.get(), &mbs_end, base);
+  if (wcs_end == nullptr) {
+    // If the user passed nullptr for the end pointer, we don't need to compute
+    // it and can return early.
+    return value;
+  }
+
+  // strto* can set ERANGE or EINVAL. Preserve the value of errno in case any of
+  // the things we're about to do to compute the end pointer don't clobber it.
+  int preserved_errno = errno;
+
+  // wcs_end needs to point to the character after the one converted. We don't
+  // know how many wide characters were converted, but we can figure that out by
+  // converting the multibyte string between mbs and mbs_end back to a wide
+  // character string.
+  size_t converted_len = mbs_end - mbs.get();
+  UniquePtr<char[]> converted_mbs(new char[converted_len + 1]);
+  strncpy(converted_mbs.get(), mbs.get(), converted_len);
+  converted_mbs[converted_len] = '\0';
+
+  const char* mbsp = converted_mbs.get();
+  size_t converted_wlen = mbsrtowcs(nullptr, &mbsp, 0, &mbstate);
+  if (converted_wlen == MBS_FAILURE) {
+    // This should be impossible.
+    abort();
+  }
+
+  *wcs_end = const_cast<wchar_t*>(wcs) + converted_wlen;
+  errno = preserved_errno;
+  return value;
+}
+
+static float strtof_wrapper(const char* s, char** p, int) {
+  return strtof(s, p);
+}
+
+float wcstof(const wchar_t* s, wchar_t** p) {
+  return wcstox(strtof_wrapper, s, p, 0);
+}
+
+static double strtod_wrapper(const char* s, char** p, int) {
+  return strtod(s, p);
+}
+
+double wcstod(const wchar_t *restrict s, wchar_t **restrict p) {
+  return wcstox(strtod_wrapper, s, p, 0);
+}
+
+long wcstol(const wchar_t *restrict s, wchar_t **restrict p, int base) {
+  return wcstox(strtol, s, p, base);
+}
+
+unsigned long wcstoul(const wchar_t *restrict s, wchar_t **restrict p, int base) {
+  return wcstox(strtoul, s, p, base);
+}
+
+long long wcstoll(const wchar_t *restrict s, wchar_t **restrict p, int base) {
+  return wcstox(strtoll, s, p, base);
+}
+
+unsigned long long wcstoull(const wchar_t *restrict s, wchar_t **restrict p, int base) {
+  return wcstox(strtoull, s, p, base);
+}
+
+intmax_t wcstoimax(const wchar_t *restrict s, wchar_t **restrict p, int base) {
+  return wcstox(strtoimax, s, p, base);
+}
+
+uintmax_t wcstoumax(const wchar_t *restrict s, wchar_t **restrict p, int base) {
+  return wcstox(strtoumax, s, p, base);
+}
diff --git a/sources/crt/crtbrand.S b/sources/crt/crtbrand.S
index fa5c3bd..9dfcfd2 100644
--- a/sources/crt/crtbrand.S
+++ b/sources/crt/crtbrand.S
@@ -27,7 +27,7 @@
  */
 
   .section .note.android.ident,"a",%note
-  .balign 4
+  .align 2
   .type note_android_ident, %object
 note_android_ident:
   .long note_data-note_name  // int32_t n_namesz
@@ -52,17 +52,3 @@
 
 note_end:
   .size note_android_ident, .-note_android_ident
-
-#if defined(__aarch64__)
-  .pushsection .note.gnu.property, "a"
-    .balign 8
-    .long 4
-    .long 0x10
-    .long 0x5        // NT_GNU_PROPERTY_TYPE_0
-    .asciz "GNU"
-    .long 0xc0000000 // GNU_PROPERTY_AARCH64_FEATURE_1_AND
-    .long 4
-    .long 0x3        // PAuth and BTI compatible
-    .long 0
-  .popsection
-#endif
diff --git a/sources/cxx-stl/system/include/cstdlib b/sources/cxx-stl/system/include/cstdlib
index a19190f..ef29fa7 100644
--- a/sources/cxx-stl/system/include/cstdlib
+++ b/sources/cxx-stl/system/include/cstdlib
@@ -106,14 +106,10 @@
 using ::lldiv_t;
 using ::lldiv;
 
-#if __ANDROID_API__ >= 26
 using ::mblen;
-#endif
 using ::mbstowcs;
-#if __ANDROID_API__ >= 21
 using ::mbtowc;
 using ::wctomb;
-#endif
 using ::wcstombs;
 
 #if __ANDROID_API__ >= 9
diff --git a/sources/host-tools/gdb-stub/gdb-stub.c b/sources/host-tools/gdb-stub/gdb-stub.c
new file mode 100644
index 0000000..49c6fc3
--- /dev/null
+++ b/sources/host-tools/gdb-stub/gdb-stub.c
@@ -0,0 +1,201 @@
+/***************************************************************************
+** The BSD 3-Clause License. http://www.opensource.org/licenses/BSD-3-Clause
+**
+** This file is part of 'mingw-builds' project.
+** Copyright (c) 2011,2012,2013 by niXman (i dotty nixman doggy gmail dotty com)
+** All rights reserved.
+**
+** Project: mingw-builds ( http://sourceforge.net/projects/mingwbuilds/ )
+**
+** Redistribution and use in source and binary forms, with or without 
+** modification, are permitted provided that the following conditions are met:
+** - Redistributions of source code must retain the above copyright 
+**     notice, this list of conditions and the following disclaimer.
+** - Redistributions in binary form must reproduce the above copyright 
+**     notice, this list of conditions and the following disclaimer in 
+**     the documentation and/or other materials provided with the distribution.
+** - Neither the name of the 'mingw-builds' nor the names of its contributors may 
+**     be used to endorse or promote products derived from this software 
+**     without specific prior written permission.
+**
+** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 
+** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 
+** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 
+** A PARTICULAR PURPOSE ARE DISCLAIMED.
+** IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY 
+** DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 
+** (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS 
+** OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
+** CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 
+** OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE 
+** USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+**
+***************************************************************************/
+
+#include <windows.h>
+
+#include <stdio.h>
+#include <strings.h>
+
+#ifdef _DEBUG
+ #define dbg_printf(...) printf(__VA_ARGS__)
+#else
+ #define dbg_printf(...) do {} while(0)
+#endif
+
+#define GDB_TO_PYTHON_REL_DIR "."
+
+#define GDB_EXECUTABLE_ORIG_FILENAME "gdb-orig.exe"
+
+// The stub is installed to $PREBUILTS/bin, PYTHONHOME is $PREBUILTS.
+#define PYTHONHOME_REL_DIR ".."
+
+#define DIE_IF_FALSE(var) \
+	do { \
+		if ( !(var) ) { \
+			fprintf(stderr, "%s(%d)[%d]: expression \"%s\" fail. terminate.\n" \
+				,__FILE__ \
+				,__LINE__ \
+				,GetLastError() \
+				,#var \
+			); \
+			exit(1); \
+		} \
+	} while (0)
+
+int main(int argc, char** argv) {
+	enum {
+		 envbufsize = 1024*32
+		,exebufsize = 1024
+		,cmdbufsize = envbufsize
+	};
+
+	char *envbuf, *sep, *resbuf, *cmdbuf;
+	DWORD len, exitCode;
+	STARTUPINFO si;
+	PROCESS_INFORMATION pi;
+
+	DIE_IF_FALSE(
+		(envbuf = (char *)malloc(envbufsize))
+	);
+	DIE_IF_FALSE(
+		(cmdbuf = (char *)malloc(cmdbufsize))
+	);
+	*cmdbuf = 0;
+
+	DIE_IF_FALSE(
+		GetEnvironmentVariable("PATH", envbuf, envbufsize)
+	);
+	dbg_printf("env: %s\n", envbuf);
+
+	DIE_IF_FALSE(
+		GetModuleFileName(0, cmdbuf, exebufsize)
+	);
+	dbg_printf("curdir: %s\n", cmdbuf);
+
+	DIE_IF_FALSE(
+		(sep = strrchr(cmdbuf, '\\'))
+	);
+	*(sep+1) = 0;
+	strcat(cmdbuf, GDB_TO_PYTHON_REL_DIR);
+	dbg_printf("sep: %s\n", cmdbuf);
+
+	len = strlen(envbuf)+strlen(cmdbuf)
+		+1  /* for environment separator */
+		+1; /* for zero-terminator */
+
+	DIE_IF_FALSE(
+		(resbuf = (char *)malloc(len))
+	);
+
+	DIE_IF_FALSE(
+		(snprintf(resbuf, len, "%s;%s", cmdbuf, envbuf) > 0)
+	);
+	dbg_printf("PATH: %s\n", resbuf);
+
+	DIE_IF_FALSE(
+		SetEnvironmentVariable("PATH", resbuf)
+	);
+
+	*(sep+1) = 0;
+	strcat(cmdbuf, PYTHONHOME_REL_DIR);
+	dbg_printf("PYTHONHOME: %s\n", cmdbuf);
+	DIE_IF_FALSE(
+		SetEnvironmentVariable("PYTHONHOME", cmdbuf)
+	);
+
+	*(sep+1) = 0;
+	strcat(cmdbuf, GDB_EXECUTABLE_ORIG_FILENAME" ");
+
+	if ( argc > 1 ) {
+		for ( ++argv; *argv; ++argv ) {
+			len = strlen(cmdbuf);
+			snprintf(cmdbuf+len, cmdbufsize-len, "%s ", *argv);
+		}
+	}
+	dbg_printf("cmd: %s\n", cmdbuf);
+
+	HANDLE ghJob = CreateJobObject(NULL, "Gdb-Wrapper\0"/*NULL*/);
+	if ( ghJob == NULL ) {
+        fprintf(stderr, "Could not create job object\n");
+	}
+	else{
+		JOBOBJECT_EXTENDED_LIMIT_INFORMATION jeli = { 0 };
+		// Configure all child processes associated with the job to terminate when the last handle to the job is closed
+		jeli.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE;
+		if ( SetInformationJobObject(ghJob, JobObjectExtendedLimitInformation, &jeli, sizeof(jeli)) == 0 ) {
+            fprintf(stderr, "Could not SetInformationJobObject\n");
+		}
+	}
+
+	memset(&si, 0, sizeof(si));
+	si.cb = sizeof(si);
+	si.dwFlags |= STARTF_USESTDHANDLES;
+	si.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
+	si.hStdOutput = GetStdHandle(STD_OUTPUT_HANDLE);
+	si.hStdError = GetStdHandle(STD_ERROR_HANDLE);
+
+	memset(&pi, 0, sizeof(pi));
+
+	DIE_IF_FALSE(
+		CreateProcess(
+			0			// exe name
+			,cmdbuf		// command line
+			,0			// process security attributes
+			,0			// primary thread security attributes
+			,TRUE		// handles are inherited
+			,0			// creation flags
+			,0			// use parent's environment
+			,0			// use parent's current directory
+			,&si		// STARTUPINFO pointer
+			,&pi		// receives PROCESS_INFORMATION
+		)
+	);
+
+	if ( ghJob != NULL )
+		if ( AssignProcessToJobObject(ghJob, pi.hProcess) == 0 ) {
+            fprintf(stderr, "Could not AssignProcessToObject\n");
+		}
+
+	// Do not handle Ctrl-C in the wrapper
+	SetConsoleCtrlHandler(NULL, TRUE);
+
+	WaitForSingleObject(pi.hProcess, INFINITE);
+
+	DIE_IF_FALSE(
+		GetExitCodeProcess(pi.hProcess, &exitCode)
+	);
+
+	if ( ghJob != NULL )
+		CloseHandle(ghJob);
+	CloseHandle( pi.hProcess );
+	CloseHandle( pi.hThread );
+
+	free(envbuf);
+	free(resbuf);
+	free(cmdbuf);
+
+	dbg_printf("exiting with exitCode %d", exitCode);
+
+	return exitCode;
+}
diff --git a/sources/host-tools/toolbox/build-toolbox.sh b/sources/host-tools/toolbox/build-toolbox.sh
new file mode 100755
index 0000000..d77e1e7
--- /dev/null
+++ b/sources/host-tools/toolbox/build-toolbox.sh
@@ -0,0 +1,116 @@
+#!/bin/sh
+#
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#  This shell script is used to rebuild the toolbox programs which sources
+#  are under $NDK/sources/host-tools/toolbox
+#
+
+# include common function and variable definitions
+. $NDK_BUILDTOOLS_PATH/prebuilt-common.sh
+. $NDK_BUILDTOOLS_PATH/builder-funcs.sh
+
+PROGRAM_PARAMETERS=""
+
+PROGRAM_DESCRIPTION=\
+"Rebuild the prebuilt host toolbox binaries for the Android NDK.
+
+These are simple command-line programs used by the NDK build script.
+
+By default, this will try to place the binaries inside the current NDK
+directory, unless you use the --ndk-dir=<path> option.
+"
+
+PACKAGE_DIR=
+register_var_option "--package-dir=<path>" PACKAGE_DIR "Put prebuilt tarballs into <path>."
+
+NDK_DIR=
+register_var_option "--ndk-dir=<path>" NDK_DIR "Specify NDK root path for the build."
+
+BUILD_DIR=
+register_var_option "--build-dir=<path>" BUILD_DIR "Specify temporary build dir."
+
+NO_MAKEFILE=
+register_var_option "--no-makefile" NO_MAKEFILE "Do not use makefile to speed-up build"
+
+PACKAGE_DIR=
+register_var_option "--package-dir=<path>" PACKAGE_DIR "Archive binaries into package directory"
+
+register_jobs_option
+register_try64_option
+
+extract_parameters "$@"
+
+# Handle NDK_DIR
+if [ -z "$NDK_DIR" ] ; then
+    NDK_DIR=$ANDROID_NDK_ROOT
+    log "Auto-config: --ndk-dir=$NDK_DIR"
+else
+    if [ ! -d "$NDK_DIR" ] ; then
+        echo "ERROR: NDK directory does not exists: $NDK_DIR"
+        exit 1
+    fi
+fi
+
+rm -rf $BUILD_DIR/* && mkdir -p $BUILD_DIR
+fail_panic "Could not create build directory: $BUILD_DIR"
+
+if [ -z "$NO_MAKEFILE" ]; then
+    MAKEFILE=$BUILD_DIR/Makefile
+else
+    MAKEFILE=
+fi
+
+TOOLBOX_SRCDIR=$ANDROID_NDK_ROOT/sources/host-tools/toolbox
+
+BUILD_WINDOWS_SOURCES=yes
+
+if [ "$BUILD_WINDOWS_SOURCES" ]; then
+    ORIGINAL_HOST_TAG=$HOST_TAG
+    MINGW=yes
+    handle_canadian_build
+    prepare_canadian_toolchain $BUILD_DIR
+
+    SUBDIR=$(get_prebuilt_install_prefix $HOST_TAG)/bin
+    DSTDIR=$NDK_DIR/$SUBDIR
+    mkdir -p "$DSTDIR"
+    fail_panic "Could not create destination directory: $DSTDIR"
+
+    # Build echo.exe
+    HOST_TAG=$ORIGINAL_HOST_TAG
+    builder_begin_host "$BUILD_DIR" "$MAKEFILE"
+    builder_set_srcdir "$TOOLBOX_SRCDIR"
+    builder_set_dstdir "$DSTDIR"
+    builder_cflags -std=c99
+    builder_sources echo_win.c
+    builder_host_executable echo
+    builder_end
+
+    # Build cmp.exe
+    HOST_TAG=$ORIGINAL_HOST_TAG
+    builder_begin_host "$BUILD_DIR" "$MAKEFILE"
+    builder_set_srcdir "$TOOLBOX_SRCDIR"
+    builder_set_dstdir "$DSTDIR"
+    builder_sources cmp_win.c
+    builder_host_executable cmp
+    builder_end
+
+    if [ "$PACKAGE_DIR" ]; then
+        ARCHIVE=toolbox-$HOST_TAG.tar.bz2
+        dump "Packaging : $ARCHIVE"
+        pack_archive "$PACKAGE_DIR/$ARCHIVE" "$NDK_DIR" "$SUBDIR/echo.exe" "$SUBDIR/cmp.exe"
+        fail_panic "Could not package toolbox binaires"
+    fi
+fi
diff --git a/sources/host-tools/toolbox/build.py b/sources/host-tools/toolbox/build.py
new file mode 100755
index 0000000..6d34955
--- /dev/null
+++ b/sources/host-tools/toolbox/build.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Builds NDK toolbox.
+
+Toolbox is just a POSIX compatible cmp.exe and echo.exe for Windows.
+"""
+from __future__ import print_function
+
+import os
+import site
+import sys
+
+site.addsitedir(os.path.join(os.path.dirname(__file__), '../../../build/lib'))
+site.addsitedir(os.path.join(os.path.dirname(__file__), '../../..'))
+
+# pylint: disable=import-error,wrong-import-position
+import build_support
+from ndk.hosts import Host
+# pylint: enable=import-error,wrong-import-position
+
+
+def main(args):
+    if not args.host.is_windows:
+        sys.exit('Toolbox is only for Windows hosts.')
+
+    toolbox_src = os.path.join(args.out_dir, 'toolbox')
+    build_cmd = [
+        'bash',
+        'build-toolbox.sh',
+        '--try-64',
+        f'--build-dir={toolbox_src}',
+    ]
+    build_support.build(build_cmd, args, intermediate_package=True)
+
+
+if __name__ == '__main__':
+    build_support.run(main)
diff --git a/tests/build/NDK_ANALYZE/project/jni/foo.cpp b/tests/build/NDK_ANALYZE/project/jni/foo.cpp
index 7a86c46..6aaa547 100644
--- a/tests/build/NDK_ANALYZE/project/jni/foo.cpp
+++ b/tests/build/NDK_ANALYZE/project/jni/foo.cpp
@@ -1,5 +1,5 @@
 #include <malloc.h>
 
 void foo() {
-  malloc(10);
+  malloc(0);
 }
diff --git a/tests/build/NDK_ANALYZE/test.py b/tests/build/NDK_ANALYZE/test.py
index 9242b12..91f5316 100644
--- a/tests/build/NDK_ANALYZE/test.py
+++ b/tests/build/NDK_ANALYZE/test.py
@@ -15,31 +15,32 @@
 #
 import os
 import subprocess
-import sys
-
-from ndk.test.spec import BuildConfiguration
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
-    """Checks ndk-build output for clang-tidy warnings."""
-    ndk_build = os.path.join(ndk_path, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build += ".cmd"
-    project_path = "project"
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
-        "NDK_ANALYZE=1",
+def run_test(ndk_path, abi, platform, linker, build_flags):
+    """Runs the static analyzer on a sample project."""
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    project_path = 'project'
+    analyzer_out = os.path.join(project_path, 'report')
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
+        'NDK_ANALYZE=1',
+        f'NDK_ANALYZER_OUT={analyzer_out}',
     ]
-    proc = subprocess.Popen(
-        [ndk_build, "-C", project_path] + ndk_args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     out, _ = proc.communicate()
-    if proc.returncode != 0:
-        return proc.returncode == 0, out
+    out = out.decode('utf-8')
+    # We expect the analyzer to find an issue and exit with a failure.
+    if proc.returncode == 0:
+        return False, out
 
-    expect = "warning: Potential memory leak [clang-analyzer-unix.Malloc]"
-    return expect in out, out
+    analyzer_abi_out = os.path.join(analyzer_out, abi)
+    # The out directory gets created even if the analyzer fails, so we
+    # intentionally include bad code and make sure we get a failure report.
+    if not os.listdir(analyzer_abi_out):
+        return False, 'No analyzer output found in ' + analyzer_abi_out
+
+    return True, out
diff --git a/tests/build/NDK_ANALYZE/test_config.py b/tests/build/NDK_ANALYZE/test_config.py
index 73f1652..250efaf 100644
--- a/tests/build/NDK_ANALYZE/test_config.py
+++ b/tests/build/NDK_ANALYZE/test_config.py
@@ -3,6 +3,6 @@
 
 
 def build_unsupported(_test):
-    if sys.platform == "win32":
+    if sys.platform == 'win32':
         return sys.platform
     return None
diff --git a/tests/build/alignment_compat/project/CMakeLists.txt b/tests/build/alignment_compat/project/CMakeLists.txt
deleted file mode 100644
index 7928b9b..0000000
--- a/tests/build/alignment_compat/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(DefaultAlignment LANGUAGES CXX)
-
-add_library(foo SHARED jni/foo.cpp)
diff --git a/tests/build/alignment_compat/project/jni/Application.mk b/tests/build/alignment_compat/project/jni/Application.mk
deleted file mode 100644
index ee2e673..0000000
--- a/tests/build/alignment_compat/project/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_SUPPORT_FLEXIBLE_PAGE_SIZES := true
diff --git a/tests/build/alignment_compat/project/jni/foo.cpp b/tests/build/alignment_compat/project/jni/foo.cpp
deleted file mode 100644
index 5bff1b5..0000000
--- a/tests/build/alignment_compat/project/jni/foo.cpp
+++ /dev/null
@@ -1,2 +0,0 @@
-extern "C" void foo() {
-}
diff --git a/tests/build/alignment_compat/test.py b/tests/build/alignment_compat/test.py
deleted file mode 100644
index 46f2f66..0000000
--- a/tests/build/alignment_compat/test.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#
-# Copyright (C) 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Tests that binaries are built with the correct max-page-size."""
-from pathlib import Path
-from subprocess import CalledProcessError
-import subprocess
-import re
-from collections.abc import Iterator
-
-from ndk.abis import Abi
-from ndk.hosts import Host
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.builders import CMakeBuilder, NdkBuildBuilder
-
-
-PROJECT_PATH = Path("project")
-
-
-def iter_load_alignments(readelf_output: str) -> Iterator[tuple[int, int]]:
-    """Iterates over the offset and alignment of each LOAD section."""
-    # Example output:
-    #
-    #   Type           Offset   VirtAddr           PhysAddr           FileSiz  MemSiz   Flg Align
-    #   PHDR           0x000040 0x0000000000000040 0x0000000000000040 0x0002a0 0x0002a0 R   0x8
-    #   LOAD           0x000000 0x0000000000000000 0x0000000000000000 0x099604 0x099604 R   0x1000
-    pattern = re.compile(r"^\s+LOAD\s+(0x[0-9a-fA-F]+).+(0x[0-9a-fA-F]+)$")
-    for line in readelf_output.splitlines():
-        if "LOAD" not in line:
-            continue
-        if (match := pattern.search(line)) is not None:
-            yield int(match.group(1), base=16), int(match.group(2), base=16)
-        else:
-            raise ValueError(f"Could not parse LOAD line {line}")
-
-
-def verify_load_section_alignment(
-    path: Path, ndk: Path, expected_alignment: int
-) -> tuple[bool, str | None]:
-    """Verifies that each LOAD section in the given file has the correct alignment."""
-    readelf = (
-        ndk / "toolchains/llvm/prebuilt" / Host.current().tag / "bin" / "llvm-readelf"
-    )
-    readelf = readelf.with_suffix(Host.current().exe_suffix)
-    output = subprocess.run(
-        [readelf, "-lW", path], check=True, capture_output=True, text=True
-    ).stdout
-    for offset, alignment in iter_load_alignments(output):
-        if alignment != expected_alignment:
-            return (
-                False,
-                f"{path.resolve()}: LOAD section at {offset:x} has incorrect alignment {alignment:x}. "
-                f"Expected {expected_alignment:x}",
-            )
-    return True, None
-
-
-def verify_load_section_alignment_each_file(
-    paths: list[Path], ndk: Path, expected_alignment: int
-) -> tuple[bool, str | None]:
-    """Verifies that the LOAD section alignment is correct for each given file."""
-    for path in paths:
-        result, text = verify_load_section_alignment(path, ndk, expected_alignment)
-        if not result:
-            return result, text
-    return True, None
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str | None]:
-    """Checks that the binary's LOAD sections have the correct alignment."""
-    cmake_builder = CMakeBuilder.from_build_config(
-        PROJECT_PATH,
-        Path(ndk_path),
-        config,
-        cmake_build_flags=["-DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON"],
-    )
-    # Page-size compat for ndk-build is enabled in project/jni/Application.mk.
-    ndk_build_builder = NdkBuildBuilder.from_build_config(
-        PROJECT_PATH, Path(ndk_path), config
-    )
-    try:
-        cmake_builder.build()
-        ndk_build_builder.build()
-    except CalledProcessError as ex:
-        return False, f"Build failed:\n{ex.stdout}"
-    return verify_load_section_alignment_each_file(
-        [cmake_builder.out_dir / "libfoo.so", ndk_build_builder.out_dir / "libfoo.so"],
-        Path(ndk_path),
-        expected_alignment=16 * 1024 if config.abi == Abi("arm64-v8a") else 4 * 1024,
-    )
diff --git a/tests/build/alignment_default/project/CMakeLists.txt b/tests/build/alignment_default/project/CMakeLists.txt
deleted file mode 100644
index 7928b9b..0000000
--- a/tests/build/alignment_default/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(DefaultAlignment LANGUAGES CXX)
-
-add_library(foo SHARED jni/foo.cpp)
diff --git a/tests/build/alignment_default/project/jni/foo.cpp b/tests/build/alignment_default/project/jni/foo.cpp
deleted file mode 100644
index 5bff1b5..0000000
--- a/tests/build/alignment_default/project/jni/foo.cpp
+++ /dev/null
@@ -1,2 +0,0 @@
-extern "C" void foo() {
-}
diff --git a/tests/build/alignment_default/test.py b/tests/build/alignment_default/test.py
deleted file mode 100644
index c5c4b75..0000000
--- a/tests/build/alignment_default/test.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#
-# Copyright (C) 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Tests that binaries are built with the correct max-page-size."""
-from pathlib import Path
-from subprocess import CalledProcessError
-import subprocess
-import re
-from collections.abc import Iterator
-
-from ndk.hosts import Host
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.builders import CMakeBuilder, NdkBuildBuilder
-
-
-PROJECT_PATH = Path("project")
-
-
-def iter_load_alignments(readelf_output: str) -> Iterator[tuple[int, int]]:
-    """Iterates over the offset and alignment of each LOAD section."""
-    # Example output:
-    #
-    #   Type           Offset   VirtAddr           PhysAddr           FileSiz  MemSiz   Flg Align
-    #   PHDR           0x000040 0x0000000000000040 0x0000000000000040 0x0002a0 0x0002a0 R   0x8
-    #   LOAD           0x000000 0x0000000000000000 0x0000000000000000 0x099604 0x099604 R   0x1000
-    pattern = re.compile(r"^\s+LOAD\s+(0x[0-9a-fA-F]+).+(0x[0-9a-fA-F]+)$")
-    for line in readelf_output.splitlines():
-        if "LOAD" not in line:
-            continue
-        if (match := pattern.search(line)) is not None:
-            yield int(match.group(1), base=16), int(match.group(2), base=16)
-        else:
-            raise ValueError(f"Could not parse LOAD line {line}")
-
-
-def verify_load_section_alignment(
-    path: Path, ndk: Path, expected_alignment: int
-) -> tuple[bool, str | None]:
-    """Verifies that each LOAD section in the given file has the correct alignment."""
-    readelf = (
-        ndk / "toolchains/llvm/prebuilt" / Host.current().tag / "bin" / "llvm-readelf"
-    )
-    readelf = readelf.with_suffix(Host.current().exe_suffix)
-    output = subprocess.run(
-        [readelf, "-lW", path], check=True, capture_output=True, text=True
-    ).stdout
-    for offset, alignment in iter_load_alignments(output):
-        if alignment != expected_alignment:
-            return (
-                False,
-                f"LOAD section at {offset:x} has incorrect alignment {alignment:x}. "
-                f"Expected {expected_alignment:x}",
-            )
-    return True, None
-
-
-def verify_load_section_alignment_each_file(
-    paths: list[Path], ndk: Path, expected_alignment: int
-) -> tuple[bool, str | None]:
-    """Verifies that the LOAD section alignment is correct for each given file."""
-    for path in paths:
-        result, text = verify_load_section_alignment(path, ndk, expected_alignment)
-        if not result:
-            return result, text
-    return True, None
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str | None]:
-    """Checks that the binary's LOAD sections have the correct alignment."""
-    cmake_builder = CMakeBuilder.from_build_config(PROJECT_PATH, Path(ndk_path), config)
-    ndk_build_builder = NdkBuildBuilder.from_build_config(
-        PROJECT_PATH, Path(ndk_path), config
-    )
-    try:
-        cmake_builder.build()
-        ndk_build_builder.build()
-    except CalledProcessError as ex:
-        return False, f"Build failed:\n{ex.stdout}"
-    return verify_load_section_alignment_each_file(
-        [cmake_builder.out_dir / "libfoo.so", ndk_build_builder.out_dir / "libfoo.so"],
-        Path(ndk_path),
-        expected_alignment=4 * 1024,
-    )
diff --git a/tests/build/allow_missing_prebuilt/project/jni/Android.mk b/tests/build/allow_missing_prebuilt/project/jni/Android.mk
deleted file mode 100644
index dda5d50..0000000
--- a/tests/build/allow_missing_prebuilt/project/jni/Android.mk
+++ /dev/null
@@ -1,16 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo_static
-LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libfoo.a
-LOCAL_ALLOW_MISSING_PREBUILT := true
-include $(PREBUILT_STATIC_LIBRARY)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libfoo.so
-LOCAL_ALLOW_MISSING_PREBUILT := true
-# Prevent the test from failing because llvm-strip will reject an empty file
-# because it's not ELF.
-LOCAL_STRIP_MODE := none
-include $(PREBUILT_SHARED_LIBRARY)
diff --git a/tests/build/allow_missing_prebuilt/project/jni/Application.mk b/tests/build/allow_missing_prebuilt/project/jni/Application.mk
deleted file mode 100644
index ce09535..0000000
--- a/tests/build/allow_missing_prebuilt/project/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_STL := c++_static
diff --git a/tests/build/allow_missing_prebuilt/project/jni/foo.cpp b/tests/build/allow_missing_prebuilt/project/jni/foo.cpp
deleted file mode 100644
index e276249..0000000
--- a/tests/build/allow_missing_prebuilt/project/jni/foo.cpp
+++ /dev/null
@@ -1,3 +0,0 @@
-int main(int argc, char** argv) {
-  return 0;
-}
diff --git a/tests/build/allow_missing_prebuilt/test.py b/tests/build/allow_missing_prebuilt/test.py
deleted file mode 100644
index e4a75f6..0000000
--- a/tests/build/allow_missing_prebuilt/test.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check that LOCAL_ALLOW_MISSING_PREBUILT is obeyed."""
-import os
-from pathlib import Path
-import subprocess
-import sys
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-
-
-PROJECT_PATH = Path("project")
-
-
-def ndk_build(
-    ndk_path: str, config: BuildConfiguration, sync_only: bool = False
-) -> tuple[bool, str]:
-    ndk_build_path = os.path.join(ndk_path, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build_path += ".cmd"
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
-    ]
-    if sync_only:
-        ndk_args.append("-n")
-    proc = subprocess.run(
-        [ndk_build_path, "-C", str(PROJECT_PATH)] + ndk_args,
-        check=False,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
-    return proc.returncode == 0, proc.stdout
-
-
-def check_build_fail_if_missing(
-    ndk_path: str, config: BuildConfiguration
-) -> Optional[str]:
-    """Checks that the build fails if the libraries are missing."""
-    success, output = ndk_build(ndk_path, config)
-    if not success:
-        return None
-    return f"Build should have failed because prebuilts are missing:\n{output}"
-
-
-def check_sync_pass_if_missing(
-    ndk_path: str, config: BuildConfiguration
-) -> Optional[str]:
-    """Checks that the build fails if the libraries are missing."""
-    success, output = ndk_build(ndk_path, config, sync_only=True)
-    if success:
-        return None
-    return f"Build should have passed because ran with -n:\n{output}"
-
-
-def check_build_pass_if_present(
-    ndk_path: str, config: BuildConfiguration
-) -> Optional[str]:
-    """Checks that the build fails if the libraries are missing."""
-    prebuilt_dir = PROJECT_PATH / "jni" / config.abi
-    prebuilt_dir.mkdir(parents=True)
-    (prebuilt_dir / "libfoo.a").touch()
-    (prebuilt_dir / "libfoo.so").touch()
-    success, output = ndk_build(ndk_path, config)
-    if success:
-        return None
-    return f"Build should have passed because prebuilts are present:\n{output}"
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
-    """Check that LOCAL_ALLOW_MISSING_PREBUILT is obeyed.
-
-    LOCAL_ALLOW_MISSING_PREBUILT should prevent
-    PREBUILT_SHARED_LIBRARY/PREBUILT_STATIC_LIBRARY modules from failing-fast
-    when the prebuilt is not present. This is sometimes used for AGP projects
-    where the "pre" built is actually built by another module but AGP still
-    needs to sync the gradle project before anything is built. The *build* will
-    still fail if the library doesn't exist by the time it is needed, but
-    that's caused by the failing copy rule.
-    """
-    if (error := check_build_fail_if_missing(ndk_path, config)) is not None:
-        return False, error
-    if (error := check_sync_pass_if_missing(ndk_path, config)) is not None:
-        return False, error
-    if (error := check_build_pass_if_present(ndk_path, config)) is not None:
-        return False, error
-    return True, ""
diff --git a/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py b/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py
index 44fdd67..5bb23a5 100644
--- a/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py
+++ b/tests/build/b14825026-aarch64-FP_LO_REGS/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "arm64-v8a":
+    if test.config.abi != 'arm64-v8a':
         return test.config.abi
     return None
diff --git a/tests/build/b9193874-neon/test_config.py b/tests/build/b9193874-neon/test_config.py
index f765dff..2c0130e 100644
--- a/tests/build/b9193874-neon/test_config.py
+++ b/tests/build/b9193874-neon/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
-    return "clang"
+    return 'clang'
diff --git a/tests/build/branch-protection/__init__.py b/tests/build/branch-protection/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/branch-protection/__init__.py
+++ /dev/null
diff --git a/tests/build/branch-protection/project/jni/Android.mk b/tests/build/branch-protection/project/jni/Android.mk
deleted file mode 100644
index 781e625..0000000
--- a/tests/build/branch-protection/project/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-LOCAL_BRANCH_PROTECTION := standard
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/branch-protection/test.py b/tests/build/branch-protection/test.py
deleted file mode 100644
index 82c7196..0000000
--- a/tests/build/branch-protection/test.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check for branch protection flag support for arm64-v8a."""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
-    """Checks LOCAL_BRANCH_PROTECTION is propagated for arm64-v8a."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    if config.abi == "arm64-v8a":
-        verifier.expect_flag("-mbranch-protection=standard")
-    else:
-        verifier.expect_not_flag("-mbranch-protection=standard")
-    return verifier.verify_ndk_build().make_test_result_tuple()
diff --git a/tests/build/build-assembly-file/CMakeLists.txt b/tests/build/build-assembly-file/CMakeLists.txt
index 0ba49dc..bb5f554 100644
--- a/tests/build/build-assembly-file/CMakeLists.txt
+++ b/tests/build/build-assembly-file/CMakeLists.txt
@@ -6,10 +6,10 @@
   set(TEST_SOURCES jni/assembly1.s jni/assembly2.S)
 elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL i686)
   set(TEST_SOURCES jni/assembly-x86.S)
+elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL mips)
+  set(TEST_SOURCES jni/assembly-mips.S)
 endif()
 
-if(DEFINED TEST_SOURCES)
-  add_library(test_build_assembly SHARED ${TEST_SOURCES})
-  # Without C or C++ files, CMake doesn't know which linker to use.
-  set_target_properties(test_build_assembly PROPERTIES LINKER_LANGUAGE C)
-endif()
+add_library(test_build_assembly SHARED ${TEST_SOURCES})
+# Without C or C++ files, CMake doesn't know which linker to use.
+set_target_properties(test_build_assembly PROPERTIES LINKER_LANGUAGE C)
diff --git a/tests/build/build-assembly-file/jni/Android.mk b/tests/build/build-assembly-file/jni/Android.mk
index 486bd7f..1dd2c55 100644
--- a/tests/build/build-assembly-file/jni/Android.mk
+++ b/tests/build/build-assembly-file/jni/Android.mk
@@ -7,6 +7,10 @@
 else
     ifeq ($(TARGET_ARCH),x86)
         LOCAL_SRC_FILES := assembly-x86.S
+    else
+        ifeq ($(TARGET_ARCH),mips)
+            LOCAL_SRC_FILES := assembly-mips.S
+        endif
     endif
 endif
 include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/build-assembly-file/jni/assembly-mips.S b/tests/build/build-assembly-file/jni/assembly-mips.S
new file mode 100644
index 0000000..83907ff
--- /dev/null
+++ b/tests/build/build-assembly-file/jni/assembly-mips.S
@@ -0,0 +1,9 @@
+       .text
+       .align  2
+       .ent    foo
+       .globl  foo
+       .set    noreorder
+foo:
+       jr      $31
+       nop
+       .end    foo
diff --git a/tests/build/build-mode-cmake/CMakeLists.txt b/tests/build/build-mode-cmake/CMakeLists.txt
index 79df84d..4c948da 100644
--- a/tests/build/build-mode-cmake/CMakeLists.txt
+++ b/tests/build/build-mode-cmake/CMakeLists.txt
@@ -2,6 +2,10 @@
 
 include(ExternalProject)
 
+find_program(NINJA_PATH
+             NAMES ninja
+             DOC "Path to ninja binary")
+
 function(add_test name)
   cmake_parse_arguments(ARG
                         "CHECK_NEON;CHECK_ARM;CHECK_THUMB2;CHECK_X86"
@@ -17,7 +21,7 @@
           -DCHECK_ARM=${ARG_CHECK_ARM}
           -DCHECK_THUMB2=${ARG_CHECK_THUMB2}
           -DCHECK_X86=${ARG_CHECK_X86}
-          -DCMAKE_MAKE_PROGRAM=${CMAKE_MAKE_PROGRAM}
+          -DCMAKE_MAKE_PROGRAM=${NINJA_PATH}
     )
   if(DEFINED ARG_NEON)
     list(APPEND args -DANDROID_ARM_NEON=${ARG_NEON})
@@ -46,6 +50,20 @@
           ARM_MODE arm
           CHECK_NEON CHECK_ARM)
 
+  add_test(test_build_mode_no_neon
+          NEON OFF
+          CHECK_THUMB2)
+
+  add_test(test_build_mode_thumb2_no_neon
+          ARM_MODE thumb
+          NEON OFF
+          CHECK_THUMB2)
+
+  add_test(test_build_mode_armv7_no_neon
+          ARM_MODE arm
+          NEON OFF
+          CHECK_ARM)
+
   add_test(test_build_mode_neon
           NEON ON
           CHECK_NEON CHECK_THUMB2)
diff --git a/tests/build/build-mode-cmake/jni/main.c b/tests/build/build-mode-cmake/jni/main.c
index 7a56455..6e5fa9c 100644
--- a/tests/build/build-mode-cmake/jni/main.c
+++ b/tests/build/build-mode-cmake/jni/main.c
@@ -32,6 +32,10 @@
 #  ifndef __i386__
 #    error "This source file should be compiled with an x86 toolchain"
 #  endif
+#elif defined(CHECK_MIPS)
+#  ifndef __mips__
+#    error "This source file should be compiled with a MIPS toolchain"
+#  endif
 #else
 #  error "This unit test is broken!"
 #endif
diff --git a/tests/build/build-mode/jni/Android.mk b/tests/build/build-mode/jni/Android.mk
index 8daa807..f804d19 100644
--- a/tests/build/build-mode/jni/Android.mk
+++ b/tests/build/build-mode/jni/Android.mk
@@ -1,8 +1,40 @@
 LOCAL_PATH := $(call my-dir)
 
+# We build 8 armeabi-v7a binaries because we need to check neon as well
+#
 ifneq ($(filter $(TARGET_ARCH_ABI), armeabi-v7a),)
 
 include $(CLEAR_VARS)
+LOCAL_MODULE := test_build_mode_thumb2
+LOCAL_CFLAGS += -DCHECK_THUMB2
+LOCAL_SRC_FILES := main.c
+LOCAL_ARM_NEON := false
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := test_build_mode_thumb2_b
+LOCAL_CFLAGS += -DCHECK_THUMB2
+LOCAL_SRC_FILES := main.c
+LOCAL_ARM_MODE := thumb
+LOCAL_ARM_NEON := false
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := test_build_mode_armv7
+LOCAL_CFLAGS += -DCHECK_ARM
+LOCAL_SRC_FILES := main.c.arm
+LOCAL_ARM_NEON := false
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := test_build_mode_armv7_b
+LOCAL_CFLAGS += -DCHECK_ARM
+LOCAL_SRC_FILES := main.c
+LOCAL_ARM_MODE := arm
+LOCAL_ARM_NEON := false
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
 LOCAL_MODULE := test_build_mode_thumb2_neon
 LOCAL_CFLAGS += -DCHECK_THUMB2 -DCHECK_NEON
 LOCAL_SRC_FILES := main.c.neon
diff --git a/tests/build/build-mode/jni/main.c b/tests/build/build-mode/jni/main.c
index 138388b..ca83190 100644
--- a/tests/build/build-mode/jni/main.c
+++ b/tests/build/build-mode/jni/main.c
@@ -29,6 +29,10 @@
 #  ifndef __i386__
 #    error "This source file should be compiled with an x86 toolchain"
 #  endif
+#elif defined(CHECK_MIPS)
+#  ifndef __mips__
+#    error "This source file should be compiled with a MIPS toolchain"
+#  endif
 #else
 #  error "This unit test is broken!"
 #endif
diff --git a/tests/build/build_id/project/CMakeLists.txt b/tests/build/build_id/project/CMakeLists.txt
deleted file mode 100644
index b36b8aa..0000000
--- a/tests/build/build_id/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.10)
-project(build_id)
-
-add_library(foo SHARED jni/foo.cpp)
diff --git a/tests/build/build_id/project/jni/Android.mk b/tests/build/build_id/project/jni/Android.mk
deleted file mode 100644
index 365bbad..0000000
--- a/tests/build/build_id/project/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/build_id/test.py b/tests/build/build_id/test.py
deleted file mode 100644
index 21cd157..0000000
--- a/tests/build/build_id/test.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check that --build-id is set appropriately for each linker.
-
-https://github.com/android/ndk/issues/885
-
-We need to use --build-id=sha1 with LLD until there's a new LLDB available in
-Studio.
-"""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
-    """Checks correct --build-id use."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.expect_flag("-Wl,--build-id=sha1")
-    verifier.expect_not_flag("-Wl,--build-id")
-    return verifier.verify().make_test_result_tuple()
diff --git a/tests/build/c++-stl-source-extensions/test_config.py b/tests/build/c++-stl-source-extensions/test_config.py
index 60124e4..8da334a 100644
--- a/tests/build/c++-stl-source-extensions/test_config.py
+++ b/tests/build/c++-stl-source-extensions/test_config.py
@@ -4,7 +4,7 @@
 
 
 def build_unsupported(_test):
-    if platform.system() == "Windows":
+    if platform.system() == 'Windows':
         # This test is specifically checking that we can handle all the
         # different C++ source extensions, including those that differ only by
         # case. Windows is case insensitive, so this test fails hard.
diff --git a/tests/build/check-armeabi-v7a-prebuilts/build.sh b/tests/build/check-armeabi-v7a-prebuilts/build.sh
new file mode 100755
index 0000000..d350c29
--- /dev/null
+++ b/tests/build/check-armeabi-v7a-prebuilts/build.sh
@@ -0,0 +1,261 @@
+#!/bin/sh
+
+# The purpose of this dummy build test is to ensure that all the
+# armeabi-v7a prebuilt binaries distributed with the NDK were
+# properly built targetting VFPv3-D16, as per the ABI spec.
+#
+# For a related bug, see http://code.google.com/p/android/issues/detail?id=26199
+#
+
+#
+# $1: ELF binary
+# $2: Tag name (e.g. Tag_CPU_name)
+#
+extract_arch_tag ()
+{
+    echo $($ARM_READELF -A "$1" | awk '$1 == "'$2':" { print $2; }' | sort -u | tr '\n' ' ')
+}
+
+# Returns success only if a file is a static object or library.
+# We simply check the suffix, which must be either .a or .o
+# $1: file name
+is_static_file ()
+{
+    case $1 in
+        *.o|*.a)
+            return 0
+            ;;
+    esac
+    return 1
+}
+
+
+#
+# WARNING: VERY IMPORTANT TECHNICAL NOTE:
+#
+# The function below works by inspecting the architecture-specific
+# attributes in an ELF file. Please be aware that the behaviour of
+# binutils-2.19 and binutils-2.21 is different when generating these
+# tags.
+#
+# 1/ When compiling for ARMv7-A targets, one can use any of the following
+#    labels for the -mfpu=<name> option:
+#
+#        vfp
+#        vfpv3
+#        vfpv3-d16
+#        neon
+#
+# 2/ There are two VFPv3 architectures defined by ARM:
+#
+#        VFPv3-D16  -> Mandates only 16 double FPU registers (d0-d15)
+#        VFPv3-D32  -> Mandates 32 double FPU registers (d0-d31)
+#
+#    In addition, NEON requires VFPv3-D32
+#
+#    There is also VFPv2, which is an earlier version of VFPv3. Technically
+#    speaking, VFPv3 is not completely backwards compatible with VFPv2 because
+#    there are a few VFPv2 instructions it doesn't support.
+#
+# 3/ The table below indicates, for each -mfpu label, the following:
+#
+#     - The value of the 'Tag_VFP_arch' attribute that will be placed in
+#       the generated object files or binaries (you can list them with
+#       'readelf -A <file>')
+#
+#     - Whether the generated code uses 16 or 32 FPU double registers
+#       (this is checked by looking at the disassembly of libgnustl_shared.so,
+#       more specifically functions like 'cosf' or 'sinf' inside it).
+#
+#  First, for binutils-2.19:
+#
+#     fpu value           EABI tag          FPU reg count
+#    -----------------------------------------------------
+#       vfp                 VFPv2            16
+#       vfpv3               VFPv3-D16        32 (*)
+#       vfpv3-d16           VFPv3            16 (*)
+#       neon                VFPv3            32
+#
+#  And now for binutils-2.21
+#
+#     fpu value           EABI tag          FPU reg count
+#    -----------------------------------------------------
+#       vfp                 VFPv2            16
+#       vfpv3               VFPv3            32
+#       vfpv3-d16           VFPv3-D16        16
+#       neon                VFPv3            32
+#
+#  This shows that:
+#
+#    - The 'VFPv3' tag seems to match VFPv3-D32 exclusively on 2.21,
+#      but is a mess with 2.19
+#
+#    - Similarly, the 'vfpv3' value seems to match VFPv3-D32 as well,
+#      with the exception that binutils-2.19 is buggy and will put an
+#      invalid tag (VFPv3-D16, instead of VFPv3) in the generate ELF file.
+#
+#    - binutils 2.19 puts the wrong tag in the executable for vfpv3 and
+#      vfpv3-d16, then should probably be inverted!
+#
+#  The end result is that we can't use the EABI tag to determine the number
+#  of hardware FPU registers that are really used by the machine code with
+#  binutils 2.19 :-(
+#
+#  BONUS:
+#
+#    - When using 'neon', binutils-2.21 will also add a new tag named
+#      'Tag_Advanced_SIMD_arch' with value 'NEONv1'. Sadly, binutils-2.19
+#      doesn't do any of this.
+#
+
+# Check that an ELF binary is compatible with our armeabi-v7a ABI
+# (i.e. no NEON, and only 16 hardware registers being used).
+#
+# See technical note above to understand how this currently works.
+# We're still assuming the toolchain is built with the buggy binutils-2.19.
+#
+# $1: path to an ARMv7-A ELF binary (static lib, shared lib or executable)
+#
+check_armv7_elf_binary ()
+{
+    # We use a small awk script to parse the output of 'readelf -A'
+    # Which typically looks like:
+    #
+    # Attribute Section: aeabi
+    #   File Attributes
+    #   Tag_CPU_name: "7-A"
+    #   Tag_CPU_arch: v7
+    #   Tag_CPU_arch_profile: Application
+    #   Tag_ARM_ISA_use: Yes
+    #   Tag_THUMB_ISA_use: Thumb-2
+    #   Tag_VFP_arch: VFPv3-D16
+    #   Tag_ABI_PCS_wchar_t: 4
+    #   Tag_ABI_FP_denormal: Needed
+    #   Tag_ABI_FP_exceptions: Needed
+    #   Tag_ABI_FP_number_model: IEEE 754
+    #   Tag_ABI_align8_needed: Yes
+    #   Tag_ABI_align8_preserved: Yes, except leaf SP
+    #   Tag_ABI_enum_size: int
+    #   Tag_ABI_HardFP_use: SP and DP
+    #   Tag_ABI_optimization_goals: Aggressive Speed
+    #   Tag_unknown_44: 1 (0x1)
+    #
+    # Note that for static libraries, these sections will appear multiple
+    # time in the output of 'readelf -A'.
+
+    echo "Checking: $(basename $1)"
+    if [ ! -f "$1" ]; then
+        1>&2 echo "PANIC: Missing binary: $1"
+        exit 1
+    fi
+
+    # We want to check the values of Tag_CPU_name
+    CPU_NAMES=$(extract_arch_tag "$1" Tag_CPU_name)
+    VFP_ARCHS=$(extract_arch_tag "$1" Tag_VFP_arch)
+    NEON_ARCHS=$(extract_arch_tag "$1" Tag_Advanced_SIMD_arch)
+
+    # IMPORTANT NOTE: Even when using -march=armv7-a, the compiler may not
+    # necessarily use ARMv7-A specific instruction and will tag an object file
+    # with the following attributes:
+    #
+    # Attribute Section: aeabi
+    #   File Attributes
+    #   Tag_CPU_name: "5TE"
+    #   Tag_CPU_arch: v5TE
+    #   Tag_ARM_ISA_use: Yes
+    #   Tag_THUMB_ISA_use: Thumb-1
+    #   Tag_ABI_PCS_wchar_t: 4
+    #   Tag_ABI_FP_denormal: Needed
+    #   Tag_ABI_FP_exceptions: Needed
+    #   Tag_ABI_FP_number_model: IEEE 754
+    #   Tag_ABI_align8_needed: Yes
+    #   Tag_ABI_align8_preserved: Yes, except leaf SP
+    #   Tag_ABI_enum_size: int
+    #   Tag_ABI_optimization_goals: Aggressive Speed
+    #   Tag_unknown_44: 1 (0x1)
+    #
+    # This means that in static libraries, you can have both
+    # '5TE' and '7-A' CPU name tags at the same time, or only
+    # '5TE' or only '7-A', deal with all these cases properly.
+
+    echo "  found tags: CPU names:'$CPU_NAMES' VFP:'$VFP_ARCHS' NEON:'$NEON_ARCHS'"
+
+    if is_static_file "$1"; then
+        # For static libraries / object files, it's ok to contain ARMv5TE binaries
+        if [ "$CPU_NAMES" == "\"5TE\"" -a "$CPU_NAMES" != "\"7-A\"" -a "$CPU_NAMES" != "\"5TE\" \"7-A\"" ]; then
+            # Neither ARMv7-A or ARMv5TE+ARMv7-A, something's fishy
+            1>&2 echo "PANIC: File is neither ARMv5TE or ARMv7-A binary: $1"
+            exit 1
+        fi
+
+        # exit here because some static libraries can have a mix of several
+        # VFP tags that make them difficult to check (e.g. libgnustl_static.a
+        # can have 'VFPv1 VFPv2 VFPv3' at the same time :-(
+        return
+    fi
+
+    # If we reach this point, we only contain ARMv7-A machine code, so look
+    # at the VFP arch tag(s)
+
+    # Sometimes no VFP_arch tag is placed in the final binary, this happens
+    # with libgabi++_shared.so for example, because the code doesn't have
+    # any floating point instructions.
+    #
+
+    # XXX: FOR NOW, ASSUME BROKEN binutils-2.19, AND THUS THAT 'VFPv3' IS VALID
+
+    if [ "$VFP_ARCHS" != "VFPv3" -a "$VFP_ARCHS" != "VFPv3-D16" -a "$VFP_ARCHS" != "" ]; then
+        1>&2 echo "PANIC: File is not a VFPv3-D16 binary: $1"
+        exit 1
+    fi
+}
+
+export ANDROID_NDK_ROOT=$NDK
+
+NDK_BUILDTOOLS_PATH=$NDK/build/tools
+. $NDK/build/tools/prebuilt-common.sh
+
+if [ -n "$APP_ABI" ]; then
+    if [ "$(convert_abi_to_arch $APP_ABI)" != "arm" ]; then
+        echo "Skipping ARM only test"
+        exit 0
+    fi
+fi
+
+ARM_TOOLCHAIN_NAME=$(get_default_toolchain_name_for_arch arm)
+ARM_TOOLCHAIN_PREFIX=$(get_default_toolchain_prefix_for_arch arm)
+
+case $(uname -s) in
+    Darwin)
+      HOST_ARCH=`uname -m`
+      case "$HOST_ARCH" in
+          i?86) HOST_ARCH=x86
+              if ! echo __LP64__ | (CCOPTS= gcc -E - 2>/dev/null) | grep -q __LP64__ ; then
+                  HOST_ARCH=x86_64
+              fi
+              ;;
+      esac
+      HOST_TAG=darwin-$HOST_ARCH
+      ;;
+    Linux)
+      HOST_TAG=linux-$(uname -m)
+      ;;
+    *)
+      echo "WARNING: This test cannot run on this machine!" >&2
+      exit 0
+      ;;
+esac
+
+ARM_READELF=$NDK/toolchains/$ARM_TOOLCHAIN_NAME/prebuilt/$HOST_TAG/bin/${ARM_TOOLCHAIN_PREFIX}-readelf
+if [ ! -f "$ARM_READELF" ]; then
+    echo "ERROR: Missing binary: $ARM_READELF" >&2
+    exit 1
+fi
+
+LIBCXX_LIBS=$NDK/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a
+check_armv7_elf_binary $LIBCXX_LIBS/libandroid_support.a
+check_armv7_elf_binary $LIBCXX_LIBS/libc++_shared.so
+check_armv7_elf_binary $LIBCXX_LIBS/libc++_static.a
+check_armv7_elf_binary $LIBCXX_LIBS/libc++abi.a
+
+echo "Done!"
diff --git a/tests/build/check-armeabi-v7a-prebuilts/jni/Android.mk b/tests/build/check-armeabi-v7a-prebuilts/jni/Android.mk
new file mode 100644
index 0000000..2f42ab6
--- /dev/null
+++ b/tests/build/check-armeabi-v7a-prebuilts/jni/Android.mk
@@ -0,0 +1 @@
+LOCAL_PATH:= $(call my-dir)
diff --git a/tests/build/check-armeabi-v7a-prebuilts/jni/Application.mk b/tests/build/check-armeabi-v7a-prebuilts/jni/Application.mk
new file mode 100644
index 0000000..850a82e
--- /dev/null
+++ b/tests/build/check-armeabi-v7a-prebuilts/jni/Application.mk
@@ -0,0 +1 @@
+APP_ABI := armeabi-v7a
diff --git a/tests/build/clang_tidy/test.py b/tests/build/clang_tidy/test.py
index 04e4376..acc3e86 100644
--- a/tests/build/clang_tidy/test.py
+++ b/tests/build/clang_tidy/test.py
@@ -18,27 +18,23 @@
 import subprocess
 import sys
 
-from ndk.test.spec import BuildConfiguration
 
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path, abi, platform, linker, build_flags):
     """Checks ndk-build V=1 output for clang-tidy warnings."""
-    ndk_build = os.path.join(ndk_path, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build += ".cmd"
-    project_path = "project"
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
-        "V=1",
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
+        'V=1',
     ]
-    proc = subprocess.Popen(
-        [ndk_build, "-C", project_path] + ndk_args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     out, _ = proc.communicate()
+    out = out.decode('utf-8')
     if proc.returncode != 0:
         return proc.returncode == 0, out
 
diff --git a/tests/build/cmake-ANDROID_EXCEPTIONS/CMakeLists.txt b/tests/build/cmake-ANDROID_EXCEPTIONS/CMakeLists.txt
deleted file mode 100644
index 16704b9..0000000
--- a/tests/build/cmake-ANDROID_EXCEPTIONS/CMakeLists.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-cmake_minimum_required(VERSION 3.6.0)
-add_executable(foo foo.cpp)
\ No newline at end of file
diff --git a/tests/build/cmake-ANDROID_EXCEPTIONS/foo.cpp b/tests/build/cmake-ANDROID_EXCEPTIONS/foo.cpp
deleted file mode 100644
index b9a3bc4..0000000
--- a/tests/build/cmake-ANDROID_EXCEPTIONS/foo.cpp
+++ /dev/null
@@ -1,8 +0,0 @@
-int main(int argc, char** argv) {
-  try {
-    throw 42;
-  } catch (const int& ex) {
-    return ex;
-  }
-  return 0;
-}
\ No newline at end of file
diff --git a/tests/build/cmake-ANDROID_EXCEPTIONS/test_config.py b/tests/build/cmake-ANDROID_EXCEPTIONS/test_config.py
deleted file mode 100644
index 4158f7d..0000000
--- a/tests/build/cmake-ANDROID_EXCEPTIONS/test_config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def extra_cmake_flags():
-    return ["-DANDROID_CPP_FEATURES=no-exceptions"]
-
-
-def is_negative_test():
-    return True
diff --git a/tests/build/cmake-libc++-shared/test_config.py b/tests/build/cmake-libc++-shared/test_config.py
index 3fa7cd6..6458cd2 100644
--- a/tests/build/cmake-libc++-shared/test_config.py
+++ b/tests/build/cmake-libc++-shared/test_config.py
@@ -6,4 +6,4 @@
 
 
 def extra_cmake_flags():  # pylint: disable=missing-docstring
-    return ["-DANDROID_STL=c++_shared"]
+    return ['-DANDROID_STL=c++_shared']
diff --git a/tests/build/cmake-neon/test_config.py b/tests/build/cmake-neon/test_config.py
index 559ff78..0cedec6 100644
--- a/tests/build/cmake-neon/test_config.py
+++ b/tests/build/cmake-neon/test_config.py
@@ -1,8 +1,8 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
 
 
 def extra_cmake_flags():
-    return ["-DANDROID_ARM_NEON=TRUE"]
+    return ['-DANDROID_ARM_NEON=TRUE']
diff --git a/tests/build/cmake-response-file/test_config.py b/tests/build/cmake-response-file/test_config.py
index d2c6e11..e8bf50c 100644
--- a/tests/build/cmake-response-file/test_config.py
+++ b/tests/build/cmake-response-file/test_config.py
@@ -1,2 +1,2 @@
 def extra_cmake_flags():
-    return ["-DCMAKE_NINJA_FORCE_RESPONSE_FILE=TRUE"]
+    return ['-DCMAKE_NINJA_FORCE_RESPONSE_FILE=TRUE']
diff --git a/tests/build/cmake_arm_mode/CMakeLists.txt b/tests/build/cmake_arm_mode/CMakeLists.txt
deleted file mode 100644
index a12c5e7..0000000
--- a/tests/build/cmake_arm_mode/CMakeLists.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-cmake_minimum_required(VERSION 3.22)
-project(ArmMode CXX)
-
-if(CMAKE_ANDROID_ARCH_ABI STREQUAL "armeabi-v7a")
-  if(NOT DEFINED CMAKE_ANDROID_ARM_MODE)
-    message(FATAL_ERROR "CMAKE_ANDROID_ARM_MODE should be set for armeabi-v7a")
-  endif()
-else()
-  if(DEFINED CMAKE_ANDROID_ARM_MODE)
-    message(
-      FATAL_ERROR
-      "CMAKE_ANDROID_ARM_MODE should not be set for non armeabi-v7a"
-    )
-  endif()
-endif()
diff --git a/tests/build/cmake_arm_mode/__init__.py b/tests/build/cmake_arm_mode/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/cmake_arm_mode/__init__.py
+++ /dev/null
diff --git a/tests/build/cmake_arm_mode/test_config.py b/tests/build/cmake_arm_mode/test_config.py
deleted file mode 100644
index 1745668..0000000
--- a/tests/build/cmake_arm_mode/test_config.py
+++ /dev/null
@@ -1,2 +0,0 @@
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_ARM_MODE=thumb"]
diff --git a/tests/build/cmake_default_flags/__init__.py b/tests/build/cmake_default_flags/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/cmake_default_flags/__init__.py
+++ /dev/null
diff --git a/tests/build/cmake_default_flags/project/CMakeLists.txt b/tests/build/cmake_default_flags/project/CMakeLists.txt
deleted file mode 100644
index 4cf4562..0000000
--- a/tests/build/cmake_default_flags/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.6)
-project(CMakeDefaultFlagsTest CXX)
-
-add_library(foo SHARED foo.cpp)
diff --git a/tests/build/cmake_default_flags/project/foo.cpp b/tests/build/cmake_default_flags/project/foo.cpp
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/cmake_default_flags/project/foo.cpp
+++ /dev/null
diff --git a/tests/build/cmake_default_flags/test.py b/tests/build/cmake_default_flags/test.py
deleted file mode 100644
index 3cab3f2..0000000
--- a/tests/build/cmake_default_flags/test.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check that the CMake toolchain uses the correct default flags."""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier, FlagVerifierResult
-
-
-def check_configuration(
-    ndk_path: str,
-    build_config: BuildConfiguration,
-    cmake_config: str,
-    expected_flags: list[str],
-    unexpected_flags: list[str],
-) -> FlagVerifierResult:
-    verifier = FlagVerifier(
-        Path("project"), Path(ndk_path), build_config
-    ).with_cmake_flag(f"-DCMAKE_BUILD_TYPE={cmake_config}")
-    for flag in expected_flags:
-        verifier.expect_flag(flag)
-    for flag in unexpected_flags:
-        verifier.expect_not_flag(flag)
-    return verifier.verify_cmake()
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
-    """Check that the CMake toolchain uses the correct default flags."""
-    verify_configs: dict[str, tuple[list[str], list[str]]] = {
-        # No flag is the same as -O0. As long as no other opt flag is used, the default
-        # is fine.
-        "Debug": ([], ["-O1", "-O2", "-O3", "-Os", "-Oz"]),
-        "MinSizeRel": (["-Os"], ["-O0", "-O1", "-O2", "-O3", "-Oz"]),
-        "Release": (["-O3"], ["-O0", "-O1", "-O2", "-Os", "-Oz"]),
-        "RelWithDebInfo": (["-O2"], ["-O0", "-O1", "-O3", "-Os", "-Oz"]),
-    }
-    for cmake_config, (expected_flags, unexpected_flags) in verify_configs.items():
-        result = check_configuration(
-            ndk_path, config, cmake_config, expected_flags, unexpected_flags
-        )
-        if result.failed():
-            return result.make_test_result_tuple()
-    return result.make_test_result_tuple()
diff --git a/tests/build/cmake_exports/CMakeLists.txt b/tests/build/cmake_exports/CMakeLists.txt
deleted file mode 100644
index f23a5e5..0000000
--- a/tests/build/cmake_exports/CMakeLists.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-cmake_minimum_required(VERSION 3.22)
-project(CMakeExportsTest C CXX)
-
-foreach(TEST_VAR CMAKE_C_COMPILER CMAKE_CXX_COMPILER CMAKE_AR CMAKE_STRIP CMAKE_RANLIB)
-    if(NOT DEFINED "${TEST_VAR}")
-        message(FATAL_ERROR "${TEST_VAR} not set")
-    elseif(NOT ${TEST_VAR} MATCHES "${CMAKE_ANDROID_NDK}")
-        message(FATAL_ERROR "${TEST_VAR} (${${TEST_VAR}}) is outside the NDK (${CMAKE_ANDROID_NDK})")
-    else()
-        message(WARNING "${TEST_VAR} is ${${TEST_VAR}}")
-    endif()
-endforeach()
diff --git a/tests/build/cmake_find_root_path/test_config.py b/tests/build/cmake_find_root_path/test_config.py
index 47a215c..cb951ef 100644
--- a/tests/build/cmake_find_root_path/test_config.py
+++ b/tests/build/cmake_find_root_path/test_config.py
@@ -2,4 +2,4 @@
 
 
 def extra_cmake_flags() -> List[str]:
-    return ["-DCMAKE_FIND_ROOT_PATH=foobar"]
+    return ['-DCMAKE_FIND_ROOT_PATH=foobar']
diff --git a/tests/build/cmake_not_mingw/CMakeLists.txt b/tests/build/cmake_not_mingw/CMakeLists.txt
deleted file mode 100644
index 4f87b83..0000000
--- a/tests/build/cmake_not_mingw/CMakeLists.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-cmake_minimum_required(VERSION 3.6)
-project(CMakeNotMinGW C CXX ASM)
-
-if(DEFINED MINGW)
-  message(FATAL_ERROR "MINGW should not be defined")
-endif()
diff --git a/tests/build/cmake_search_order/CMakeLists.txt b/tests/build/cmake_search_order/CMakeLists.txt
deleted file mode 100644
index d353980..0000000
--- a/tests/build/cmake_search_order/CMakeLists.txt
+++ /dev/null
@@ -1,21 +0,0 @@
-# https://github.com/android/ndk/issues/929
-# Tests that the first libdl found by find_library is the shared library rather
-# than the static library.
-cmake_minimum_required(VERSION 3.6.0)
-project(cmake_search_order)
-
-message("System library path: ${CMAKE_SYSTEM_LIBRARY_PATH}")
-message("System prefix path: ${CMAKE_SYSTEM_PREFIX_PATH}")
-find_library(LIBDL dl)
-if(NOT LIBDL)
-  message(FATAL_ERROR "libdl not found.")
-endif()
-
-if(LIBDL MATCHES ".a$")
-  message(FATAL_ERROR "found libdl.a")
-endif()
-
-find_program(YASM yasm)
-if(NOT YASM)
-  message(FATAL_ERROR "yasm not found")
-endif()
diff --git a/tests/build/cmake_system_stl/test_config.py b/tests/build/cmake_system_stl/test_config.py
index c37c8e7..544146c 100644
--- a/tests/build/cmake_system_stl/test_config.py
+++ b/tests/build/cmake_system_stl/test_config.py
@@ -1,2 +1,4 @@
 def extra_cmake_flags():
-    return ["-DANDROID_STL=system", "-DANDROID_CPP_FEATURES=no-rtti no-exceptions"]
+    return [
+        '-DANDROID_STL=system', '-DANDROID_CPP_FEATURES=no-rtti no-exceptions'
+    ]
diff --git a/tests/build/cmake_toolchain_defaults/__init__.py b/tests/build/cmake_toolchain_defaults/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/cmake_toolchain_defaults/__init__.py
+++ /dev/null
diff --git a/tests/build/cmake_toolchain_defaults/project/CMakeLists.txt b/tests/build/cmake_toolchain_defaults/project/CMakeLists.txt
deleted file mode 100644
index 350b9bf..0000000
--- a/tests/build/cmake_toolchain_defaults/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.6)
-project(ToolchainDefaultsTest CXX)
-
-add_library(foo SHARED foo.cpp)
diff --git a/tests/build/cmake_toolchain_defaults/project/foo.cpp b/tests/build/cmake_toolchain_defaults/project/foo.cpp
deleted file mode 100644
index 83e1781..0000000
--- a/tests/build/cmake_toolchain_defaults/project/foo.cpp
+++ /dev/null
@@ -1,8 +0,0 @@
-#if !defined(__ARM_ARCH_7A__)
-#error ABI did not default to armeabi-v7a
-#endif
-
-// Update this whenever we raise the minimum API level in the NDK.
-#if __ANDROID_API__ != 19
-#error API level did not default to 19
-#endif
diff --git a/tests/build/cmake_toolchain_defaults/test.py b/tests/build/cmake_toolchain_defaults/test.py
deleted file mode 100644
index 8224988..0000000
--- a/tests/build/cmake_toolchain_defaults/test.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#
-# Copyright (C) 2021 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check that the default CMake toolchain behavior works."""
-from pathlib import Path
-import subprocess
-
-from ndk.cmake import find_cmake, find_ninja
-from ndk.test.spec import BuildConfiguration, CMakeToolchainFile
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
-    """Check that the default CMake toolchain behavior works.
-
-    All our regular CMake tests pass the API level and ABI explicitly. This
-    test checks that the defaults (armeabi-v7a, minimum supported API level)
-    work.
-    """
-    cmake = find_cmake()
-    ninja = find_ninja()
-    toolchain_path = Path(ndk_path) / "build/cmake/android.toolchain.cmake"
-    project_path = "project"
-    if config.toolchain_file is CMakeToolchainFile.Legacy:
-        toolchain_mode = "ON"
-    else:
-        toolchain_mode = "OFF"
-    cmake_cmd = [
-        str(cmake),
-        f"-DCMAKE_TOOLCHAIN_FILE={toolchain_path}",
-        f"-DCMAKE_MAKE_PROGRAM={ninja}",
-        f"-DANDROID_USE_LEGACY_TOOLCHAIN_FILE={toolchain_mode}",
-        "-GNinja",
-    ]
-    result = subprocess.run(
-        cmake_cmd, check=False, cwd=project_path, capture_output=True, text=True
-    )
-    return result.returncode == 0, result.stdout
diff --git a/tests/build/cmake_toolchain_defaults/test_config.py b/tests/build/cmake_toolchain_defaults/test_config.py
deleted file mode 100644
index 3c08c53..0000000
--- a/tests/build/cmake_toolchain_defaults/test_config.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from typing import Optional
-
-from ndk.abis import LP32_ABIS
-from ndk.test.buildtest.case import Test
-
-
-def build_unsupported(test: Test) -> Optional[str]:
-    if test.config.abi in LP32_ABIS:
-        return test.config.abi
-    return None
diff --git a/tests/build/cortex-a53-835769/test_config.py b/tests/build/cortex-a53-835769/test_config.py
index 44fdd67..5bb23a5 100644
--- a/tests/build/cortex-a53-835769/test_config.py
+++ b/tests/build/cortex-a53-835769/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "arm64-v8a":
+    if test.config.abi != 'arm64-v8a':
         return test.config.abi
     return None
diff --git a/tests/build/disabling_neon_is_error/CMakeLists.txt b/tests/build/disabling_neon_is_error/CMakeLists.txt
deleted file mode 100644
index 33f5e97..0000000
--- a/tests/build/disabling_neon_is_error/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.6)
-project(DisablingNeonCausesError LANGUAGES CXX)
-
-add_executable(foo jni/foo.cpp)
diff --git a/tests/build/disabling_neon_is_error/jni/Android.mk b/tests/build/disabling_neon_is_error/jni/Android.mk
deleted file mode 100644
index ee12d8b..0000000
--- a/tests/build/disabling_neon_is_error/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-LOCAL_ARM_NEON := false
-include $(BUILD_EXECUTABLE)
diff --git a/tests/build/disabling_neon_is_error/jni/foo.cpp b/tests/build/disabling_neon_is_error/jni/foo.cpp
deleted file mode 100644
index aa8a4ea..0000000
--- a/tests/build/disabling_neon_is_error/jni/foo.cpp
+++ /dev/null
@@ -1,3 +0,0 @@
-int main(int, char**) {
-  return 0;
-}
diff --git a/tests/build/disabling_neon_is_error/test_config.py b/tests/build/disabling_neon_is_error/test_config.py
deleted file mode 100644
index 1683232..0000000
--- a/tests/build/disabling_neon_is_error/test_config.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from typing import Optional
-
-from ndk.abis import Abi
-from ndk.test.buildtest.case import Test
-
-
-def build_unsupported(test: Test) -> Optional[str]:
-    if test.config.abi != Abi("armeabi-v7a"):
-        return test.config.abi
-    return None
-
-
-def is_negative_test() -> bool:
-    return True
-
-
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_ARM_NEON=OFF"]
diff --git a/tests/build/enabling_neon_is_okay/CMakeLists.txt b/tests/build/enabling_neon_is_okay/CMakeLists.txt
deleted file mode 100644
index 26c3a38..0000000
--- a/tests/build/enabling_neon_is_okay/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.6)
-project(EnablingNeonCausesNoError LANGUAGES CXX)
-
-add_executable(foo jni/foo.cpp)
diff --git a/tests/build/enabling_neon_is_okay/jni/Android.mk b/tests/build/enabling_neon_is_okay/jni/Android.mk
deleted file mode 100644
index 640ab94..0000000
--- a/tests/build/enabling_neon_is_okay/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-LOCAL_ARM_NEON := true
-include $(BUILD_EXECUTABLE)
diff --git a/tests/build/enabling_neon_is_okay/jni/foo.cpp b/tests/build/enabling_neon_is_okay/jni/foo.cpp
deleted file mode 100644
index aa8a4ea..0000000
--- a/tests/build/enabling_neon_is_okay/jni/foo.cpp
+++ /dev/null
@@ -1,3 +0,0 @@
-int main(int, char**) {
-  return 0;
-}
diff --git a/tests/build/enabling_neon_is_okay/test_config.py b/tests/build/enabling_neon_is_okay/test_config.py
deleted file mode 100644
index ba38941..0000000
--- a/tests/build/enabling_neon_is_okay/test_config.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from typing import Optional
-
-from ndk.abis import Abi
-from ndk.test.buildtest.case import Test
-
-
-def build_unsupported(test: Test) -> Optional[str]:
-    if test.config.abi != Abi("armeabi-v7a"):
-        return test.config.abi
-    return None
-
-
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_ARM_NEON=ON"]
diff --git a/tests/build/non_android_mk_build_script/jni/main.mk b/tests/build/ffs/jni/Android.mk
similarity index 62%
rename from tests/build/non_android_mk_build_script/jni/main.mk
rename to tests/build/ffs/jni/Android.mk
index d9d1555..ff4ea36 100644
--- a/tests/build/non_android_mk_build_script/jni/main.mk
+++ b/tests/build/ffs/jni/Android.mk
@@ -1,6 +1,6 @@
 LOCAL_PATH := $(call my-dir)
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
+LOCAL_MODULE := ffs
+LOCAL_SRC_FILES := ffs.c
 include $(BUILD_EXECUTABLE)
diff --git a/tests/build/ffs/jni/Application.mk b/tests/build/ffs/jni/Application.mk
new file mode 100644
index 0000000..5eaff6d
--- /dev/null
+++ b/tests/build/ffs/jni/Application.mk
@@ -0,0 +1 @@
+APP_ABI := x86
diff --git a/tests/build/ffs/jni/ffs.c b/tests/build/ffs/jni/ffs.c
new file mode 100644
index 0000000..8f411ce
--- /dev/null
+++ b/tests/build/ffs/jni/ffs.c
@@ -0,0 +1,10 @@
+#include <strings.h>
+
+#if !defined(__LP64__) && __ANDROID_API__ >= 18
+#error Test misconfigured or minimum API is now 18
+#error In the latter case please remove legacy_strings_inlines.h from bionic.
+#endif
+
+int main(int argc, char* argv[]) {
+  return ffs(argc);
+}
diff --git a/tests/build/fob64_19/CMakeLists.txt b/tests/build/fob64_19/CMakeLists.txt
new file mode 100644
index 0000000..3f501e0
--- /dev/null
+++ b/tests/build/fob64_19/CMakeLists.txt
@@ -0,0 +1,6 @@
+cmake_minimum_required(VERSION 3.6.0)
+
+add_library(libfoo
+  STATIC
+    jni/foo.cpp
+)
diff --git a/tests/build/page_size_compat/jni/Android.mk b/tests/build/fob64_19/jni/Android.mk
similarity index 100%
rename from tests/build/page_size_compat/jni/Android.mk
rename to tests/build/fob64_19/jni/Android.mk
diff --git a/tests/build/fob64_19/jni/Application.mk b/tests/build/fob64_19/jni/Application.mk
new file mode 100644
index 0000000..05e396c
--- /dev/null
+++ b/tests/build/fob64_19/jni/Application.mk
@@ -0,0 +1,2 @@
+APP_STL := c++_static
+APP_PLATFORM := android-19
diff --git a/tests/build/fob64_19/jni/foo.cpp b/tests/build/fob64_19/jni/foo.cpp
new file mode 100644
index 0000000..584c576
--- /dev/null
+++ b/tests/build/fob64_19/jni/foo.cpp
@@ -0,0 +1,10 @@
+#define _FILE_OFFSET_BITS 64
+#include <cstdio>
+
+namespace {
+
+// These should be unavailable before android-24, and available afterward.
+using ::fgetpos;
+using ::fsetpos;
+
+}
diff --git a/tests/build/fob64_19/test_config.py b/tests/build/fob64_19/test_config.py
new file mode 100644
index 0000000..9822f4f
--- /dev/null
+++ b/tests/build/fob64_19/test_config.py
@@ -0,0 +1,12 @@
+def is_negative_test():
+    return True
+
+
+def extra_cmake_flags():
+    return ['-DANDROID_PLATFORM=android-19']
+
+
+def build_unsupported(test):
+    if '64' in test.config.abi:
+        return test.config.abi
+    return None
diff --git a/tests/build/fob64_21/test_config.py b/tests/build/fob64_21/test_config.py
index 9ea224a..d082c4f 100644
--- a/tests/build/fob64_21/test_config.py
+++ b/tests/build/fob64_21/test_config.py
@@ -3,10 +3,10 @@
 
 
 def extra_cmake_flags():
-    return ["-DANDROID_PLATFORM=android-21"]
+    return ['-DANDROID_PLATFORM=android-21']
 
 
 def build_unsupported(test):
-    if "64" in test.config.abi:
+    if '64' in test.config.abi:
         return test.config.abi
     return None
diff --git a/tests/build/fob64_24/test_config.py b/tests/build/fob64_24/test_config.py
index fbbd2ee..f1c51cd 100644
--- a/tests/build/fob64_24/test_config.py
+++ b/tests/build/fob64_24/test_config.py
@@ -1,8 +1,8 @@
 def extra_cmake_flags():
-    return ["-DANDROID_PLATFORM=android-24"]
+    return ['-DANDROID_PLATFORM=android-24']
 
 
 def build_unsupported(test):
-    if "64" in test.config.abi:
+    if '64' in test.config.abi:
         return test.config.abi
     return None
diff --git a/tests/build/fortify_strcpy_available/test_config.py b/tests/build/fortify_strcpy_available/test_config.py
index 9d9b554..71b6b80 100644
--- a/tests/build/fortify_strcpy_available/test_config.py
+++ b/tests/build/fortify_strcpy_available/test_config.py
@@ -1,2 +1,7 @@
+from typing import Optional
+
+from ndk.test.types import Test
+
+
 def is_negative_test() -> bool:
     return True
diff --git a/tests/build/gc_sections/project/CMakeLists.txt b/tests/build/gc_sections/project/CMakeLists.txt
deleted file mode 100644
index 4955ea3..0000000
--- a/tests/build/gc_sections/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project("--gc-sections test")
-
-add_library(foo SHARED jni/foo.cpp)
diff --git a/tests/build/gc_sections/project/jni/Android.mk b/tests/build/gc_sections/project/jni/Android.mk
deleted file mode 100644
index 365bbad..0000000
--- a/tests/build/gc_sections/project/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/gc_sections/project/jni/foo.cpp b/tests/build/gc_sections/project/jni/foo.cpp
deleted file mode 100644
index 85e6cd8..0000000
--- a/tests/build/gc_sections/project/jni/foo.cpp
+++ /dev/null
@@ -1 +0,0 @@
-void foo() {}
diff --git a/tests/build/gc_sections/test.py b/tests/build/gc_sections/test.py
deleted file mode 100644
index 8bb436a..0000000
--- a/tests/build/gc_sections/test.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check that -Wl,--gc-sections is used, but only on release builds.
-
-This flag should not be present for debug builds because that can strip functions that
-the user may want to evaluate while debugging.
-
-https://github.com/android/ndk/issues/1717
-https://github.com/android/ndk/issues/1813
-"""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
-    """Checks correct --gc-sections use."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=Release")
-    verifier.with_ndk_build_flag("APP_DEBUG=false")
-    verifier.expect_flag("-Wl,--gc-sections")
-    passed, message = verifier.verify().make_test_result_tuple(
-        "With -DCMAKE_BUILD_TYPE=Release and APP_DEBUG=false"
-    )
-    if not passed:
-        return passed, message
-
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=RelWithDebInfo")
-    verifier.expect_flag("-Wl,--gc-sections")
-    passed, message = verifier.verify_cmake().make_test_result_tuple(
-        "With -DCMAKE_BUILD_TYPE=RelWithDebInfo"
-    )
-    if not passed:
-        return passed, message
-
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=MinSizeRel")
-    verifier.expect_flag("-Wl,--gc-sections")
-    passed, message = verifier.verify_cmake().make_test_result_tuple(
-        "With -DCMAKE_BUILD_TYPE=MinSizeRel"
-    )
-    if not passed:
-        return passed, message
-
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.with_cmake_flag("-DCMAKE_BUILD_TYPE=Debug")
-    verifier.with_ndk_build_flag("APP_DEBUG=true")
-    verifier.expect_not_flag("-Wl,--gc-sections")
-    return verifier.verify().make_test_result_tuple(
-        "With -DCMAKE_BUILD_TYPE=Debug and APP_DEBUG=true"
-    )
diff --git a/tests/build/gc_sections/test_config.py b/tests/build/gc_sections/test_config.py
deleted file mode 100644
index 869627d..0000000
--- a/tests/build/gc_sections/test_config.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from ndk.test.buildtest.case import Test
-from ndk.test.spec import CMakeToolchainFile
-
-
-def build_broken(test: Test) -> tuple[str | None, str | None]:
-    if test.config.toolchain_file is CMakeToolchainFile.Default:
-        return "new CMake toolchain", "https://github.com/android/ndk/issues/1813"
-    return None, None
diff --git a/tests/build/gradle_injected_import_path/test_config.py b/tests/build/gradle_injected_import_path/test_config.py
index 0cdc24d..d2af0b4 100644
--- a/tests/build/gradle_injected_import_path/test_config.py
+++ b/tests/build/gradle_injected_import_path/test_config.py
@@ -6,4 +6,4 @@
 
 
 def extra_ndk_build_flags() -> List[str]:
-    return [f"NDK_GRADLE_INJECTED_IMPORT_PATH={THIS_DIR}"]
+    return [f'NDK_GRADLE_INJECTED_IMPORT_PATH={THIS_DIR}']
diff --git a/tests/build/headers-include/build.sh b/tests/build/headers-include/build.sh
new file mode 100755
index 0000000..65c6456
--- /dev/null
+++ b/tests/build/headers-include/build.sh
@@ -0,0 +1,100 @@
+#!/bin/sh
+# Check if some platform headers can be included alone
+# See b.android.com/64679 for one of them
+#
+
+export ANDROID_NDK_ROOT=$NDK
+
+NDK_BUILDTOOLS_PATH=$NDK/build/tools
+. $NDK/build/tools/prebuilt-common.sh
+
+JOBS=1
+for OPT; do
+    case $OPT in
+        -j*)
+            JOBS=${OPT##-j}
+            ;;
+        --jobs=*)
+            JOBS=${OPT##--jobs=}
+            ;;
+        APP_ABI=*)
+            eval readonly "$OPT"
+            ;;
+    esac
+done
+
+INTERNAL_HEADERS="sys/_errdefs.h sys/_sigdefs.h sys/_system_properties.h"
+INVALID_HEADERS_FOR_64BIT="time64.h sys/user.h"  # ToDo: remove sys/user.h later once __u64 and __u32 are defined for x86_64
+
+# This header doesn't compile unless included from C++ with
+# --std=c++-0x or higher, so skip it here.
+INTERNAL_HEADERS="$INTERNAL_HEADERS uchar.h"
+
+ABIS="$PREBUILT_ABIS"
+if [ -n "$APP_ABI" ]; then
+    ABIS=$(commas_to_spaces $APP_ABI)
+fi
+
+for ABI in $ABIS; do
+    ARCH=$(convert_abi_to_arch $ABI)
+    for API_LEVEL in $API_LEVELS; do
+        ARCH_PATH="$ANDROID_NDK_ROOT/platforms/android-$API_LEVEL/arch-$ARCH"
+        if [ ! -d "$ARCH_PATH" ]; then
+            continue
+        fi
+        HEADERS=$(cd $ARCH_PATH/usr/include; \
+            ls *.h sys/*.h android/*.h EGL/*.h GLES/*.h GLES2/*.h GLES3/*.h \
+               OMXAL/*.h SLES/*.h 2> /dev/null)
+        #echo $API_LEVEL $ARCH HEADERS=$HEADERS
+        # Create temporary project
+        PROJECT_DIR=build-dir
+        (mkdir -p "$PROJECT_DIR" && rm -rf "$PROJECT_DIR"/*) || panic "Can't create dir: $PROJECT_DIR"
+        mkdir "$PROJECT_DIR"/jni
+        cat > $PROJECT_DIR/jni/Application.mk <<EOF
+# Auto-generated - DO NOT EDIT
+APP_PLATFORM=android-$API_LEVEL
+APP_ABI=$ABI
+APP_MODULES=libfoo
+APP_DEPRECATED_HEADERS := true
+EOF
+        ANDROID_MK=$PROJECT_DIR/jni/Android.mk
+        cat > $ANDROID_MK <<EOF
+# Auto-generated - DO NOT EDIT
+LOCAL_PATH := \$(call my-dir)
+
+include \$(CLEAR_VARS)
+LOCAL_MODULE := libfoo
+LOCAL_SRC_FILES := \\
+EOF
+        for HEADER in $HEADERS; do
+            if [ "$INTERNAL_HEADERS" != "${INTERNAL_HEADERS%%$HEADER*}" ] ; then
+                continue;
+            fi
+            if [ "$ABI" != "${ABI%%64*}" ] ; then
+                if [ "$INVALID_HEADERS_FOR_64BIT" != "${INVALID_HEADERS_FOR_64BIT%%$HEADER*}" ] ; then
+                    continue;
+                fi
+            fi
+            NAME=$(echo "$HEADER" | tr '/' '__' | tr '.' '_' | tr '-' '_')
+            SRC=$NAME.c
+            SRC_FILE=$PROJECT_DIR/jni/$SRC
+            cat > $PROJECT_DIR/jni/$SRC <<EOF
+/* Auto-generated - DO NOT EDIT */
+#include <$HEADER>
+
+char dummy_$NAME = 0;
+EOF
+            echo "    $SRC \\" >> $ANDROID_MK
+        done
+        cat >> $ANDROID_MK <<EOF
+
+include \$(BUILD_STATIC_LIBRARY)
+
+EOF
+        echo "Checking headers for android-$API_LEVEL/$ABI"
+        $ANDROID_NDK_ROOT/ndk-build -C "$PROJECT_DIR" -B -j$JOBS V=1
+        fail_panic "Can't compile header for android-$API_LEVEL/$ABI, to reproduce: $ANDROID_NDK_ROOT/ndk-build -C $PROJECT_DIR"
+
+        rm -rf "$PROJECT_DIR"
+    done  # for ABI
+done  # for API_LEVEL
diff --git a/tests/build/import-install/README b/tests/build/import-install/README
new file mode 100644
index 0000000..417b7cb
--- /dev/null
+++ b/tests/build/import-install/README
@@ -0,0 +1,3 @@
+The purpose of this test is to check that imported shared libraries
+are properly installed to the target location, i.e. libs/<abi>/
+
diff --git a/tests/build/import-install/build.sh b/tests/build/import-install/build.sh
new file mode 100755
index 0000000..1bdcb8a
--- /dev/null
+++ b/tests/build/import-install/build.sh
@@ -0,0 +1,45 @@
+cd `dirname $0`
+PWD=$(pwd)
+
+# Update NDK_MODULE_PATH so we can find our imported modules
+export NDK_MODULE_PATH="$PWD"
+
+# Build everything
+$NDK/ndk-build "$@"
+
+# Extract ABIs list from parameters, we're looking for something like APP_ABI=<something>
+PARAM_ABIS=$(echo "$@" | tr ' ' '\n' | grep -e "^APP_ABI=")
+PARAM_ABIS=${PARAM_ABIS##APP_ABI=}
+if [ -z "$PARAM_ABIS" ]; then
+    echo "NO ABIS in param '$@'"
+    ABIS="armeabi-v7a x86"
+else
+    echo "FOUND ABIS in param '$@': $PARAM_ABIS"
+    ABIS="$PARAM_ABIS"
+fi
+
+# Now ensure that all files were installed to all supported ABIs
+ANDROID_NDK_ROOT=$NDK
+NDK_BUILDTOOLS_PATH=$NDK/build/tools
+source $NDK_BUILDTOOLS_PATH/prebuilt-common.sh
+MISSING=
+for ABI in $ABIS; do
+    DIR=$PWD/libs/$ABI
+    SUFFIX=$(get_lib_suffix_for_abi $ABI)
+    for FILENAME in libfoo$SUFFIX libpath1$SUFFIX libpath2$SUFFIX; do
+        FILE=$DIR/$FILENAME
+        if [ ! -f "$FILE" ]; then
+            MISSING="$MISSING $FILE"
+	fi
+    done
+done
+
+# In case of missing files, error out
+if [ "$MISSING" ]; then
+    echo "ERROR: Missing files in build tree:"
+    for FILE in $MISSING; do echo "  $FILE"; done
+    exit 1
+fi
+
+# Otherwise, our test is good
+exit 0
diff --git a/tests/build/import-install/jni/Android.mk b/tests/build/import-install/jni/Android.mk
new file mode 100644
index 0000000..f6a3176
--- /dev/null
+++ b/tests/build/import-install/jni/Android.mk
@@ -0,0 +1,10 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libfoo
+LOCAL_SRC_FILES := main.c
+LOCAL_SHARED_LIBRARIES := libpath1
+include $(BUILD_SHARED_LIBRARY)
+
+$(call import-module,path1)
+
diff --git a/tests/build/import-install/jni/Application.mk b/tests/build/import-install/jni/Application.mk
new file mode 100644
index 0000000..a252a72
--- /dev/null
+++ b/tests/build/import-install/jni/Application.mk
@@ -0,0 +1 @@
+APP_ABI := all
diff --git a/tests/build/import-install/jni/main.c b/tests/build/import-install/jni/main.c
new file mode 100644
index 0000000..91aaf0e
--- /dev/null
+++ b/tests/build/import-install/jni/main.c
@@ -0,0 +1,7 @@
+#include "path1.h"
+
+int  foo(int  x)
+{
+	return path1(x) - 16;
+}
+
diff --git a/tests/build/import-install/path1/Android.mk b/tests/build/import-install/path1/Android.mk
new file mode 100644
index 0000000..99e5b2e
--- /dev/null
+++ b/tests/build/import-install/path1/Android.mk
@@ -0,0 +1,14 @@
+# This is a trivial shared library that will be imported
+# by the main project's binary. Note that it imports
+# another library
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libpath1
+LOCAL_SRC_FILES := path1.c
+LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
+LOCAL_SHARED_LIBRARIES := libpath2
+include $(BUILD_SHARED_LIBRARY)
+
+$(call import-module,path2)
diff --git a/tests/build/import-install/path1/path1.c b/tests/build/import-install/path1/path1.c
new file mode 100644
index 0000000..6ccfcae
--- /dev/null
+++ b/tests/build/import-install/path1/path1.c
@@ -0,0 +1,8 @@
+#include "path1.h"
+#include "path2.h"
+
+int  path1(int x)
+{
+    return path2(x + 1);
+}
+
diff --git a/tests/build/import-install/path1/path1.h b/tests/build/import-install/path1/path1.h
new file mode 100644
index 0000000..7453d16
--- /dev/null
+++ b/tests/build/import-install/path1/path1.h
@@ -0,0 +1,4 @@
+#ifndef PATH1_H
+#define PATH1_H
+extern int path1(int x);
+#endif
diff --git a/tests/build/import-install/path2/Android.mk b/tests/build/import-install/path2/Android.mk
new file mode 100644
index 0000000..3e06ca2
--- /dev/null
+++ b/tests/build/import-install/path2/Android.mk
@@ -0,0 +1,10 @@
+# This is a trivial shared library that will be imported
+# by 'libpath1', and hence by the project's main binary
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := libpath2
+LOCAL_SRC_FILES := path2.c
+LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
+include $(BUILD_SHARED_LIBRARY)
+
diff --git a/tests/build/import-install/path2/path2.c b/tests/build/import-install/path2/path2.c
new file mode 100644
index 0000000..a1e76e7
--- /dev/null
+++ b/tests/build/import-install/path2/path2.c
@@ -0,0 +1,7 @@
+#include "path2.h"
+
+int path2(int x)
+{
+	return x*42;
+}
+
diff --git a/tests/build/import-install/path2/path2.h b/tests/build/import-install/path2/path2.h
new file mode 100644
index 0000000..fc55614
--- /dev/null
+++ b/tests/build/import-install/path2/path2.h
@@ -0,0 +1,4 @@
+#ifndef PATH2_H
+#define PATH2_H
+extern int path2(int x);
+#endif
diff --git a/tests/build/issue21132-__ARM_ARCH__/jni/Android.mk b/tests/build/issue21132-__ARM_ARCH__/jni/Android.mk
new file mode 100644
index 0000000..66cdd20
--- /dev/null
+++ b/tests/build/issue21132-__ARM_ARCH__/jni/Android.mk
@@ -0,0 +1,7 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := issue21132-__ARM_ARCH__
+LOCAL_SRC_FILES := issue21132-__ARM_ARCH__.c
+include $(BUILD_EXECUTABLE)
+
diff --git a/tests/build/issue21132-__ARM_ARCH__/jni/Application.mk b/tests/build/issue21132-__ARM_ARCH__/jni/Application.mk
new file mode 100644
index 0000000..79b5a35
--- /dev/null
+++ b/tests/build/issue21132-__ARM_ARCH__/jni/Application.mk
@@ -0,0 +1,2 @@
+# Only armeabi-v7a* and x86 have instructions for fast __swap32md
+APP_ABI := armeabi-v7a x86
diff --git a/tests/build/issue21132-__ARM_ARCH__/jni/issue21132-__ARM_ARCH__.c b/tests/build/issue21132-__ARM_ARCH__/jni/issue21132-__ARM_ARCH__.c
new file mode 100644
index 0000000..c3341e8
--- /dev/null
+++ b/tests/build/issue21132-__ARM_ARCH__/jni/issue21132-__ARM_ARCH__.c
@@ -0,0 +1,14 @@
+#if defined(__ARM_ARCH_5__)
+#error
+#elif defined(__ARM_ARCH_5T__)
+#error
+#elif defined(__ARM_ARCH_5E__)
+#error
+#elif defined(__ARM_ARCH_5TE__)
+#error
+#endif
+
+int main(int argc, char *argv[])
+{
+  return 0;
+}
diff --git a/tests/build/issue21132-__ARM_ARCH__/test_config.py b/tests/build/issue21132-__ARM_ARCH__/test_config.py
new file mode 100644
index 0000000..b144a90
--- /dev/null
+++ b/tests/build/issue21132-__ARM_ARCH__/test_config.py
@@ -0,0 +1,4 @@
+def build_unsupported(test):
+    if test.config.abi not in ('armeabi-v7a', 'x86'):
+        return test.config.abi
+    return None
diff --git a/tests/build/issue22336-ICE-emit-rtl/test_config.py b/tests/build/issue22336-ICE-emit-rtl/test_config.py
index 1db330b..84e726b 100644
--- a/tests/build/issue22336-ICE-emit-rtl/test_config.py
+++ b/tests/build/issue22336-ICE-emit-rtl/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
diff --git a/tests/build/issue22345-ICE-postreload/test_config.py b/tests/build/issue22345-ICE-postreload/test_config.py
index 1db330b..84e726b 100644
--- a/tests/build/issue22345-ICE-postreload/test_config.py
+++ b/tests/build/issue22345-ICE-postreload/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
diff --git a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Android.mk b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Android.mk
new file mode 100644
index 0000000..60ed9e2
--- /dev/null
+++ b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Android.mk
@@ -0,0 +1,7 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE
+LOCAL_SRC_FILES := issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c
+LOCAL_LDLIBS    += -lOpenSLES
+include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Application.mk b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Application.mk
new file mode 100644
index 0000000..e3af5f4
--- /dev/null
+++ b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/Application.mk
@@ -0,0 +1,2 @@
+APP_ABI := all
+APP_PLATFORM := android-14
\ No newline at end of file
diff --git a/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c
new file mode 100644
index 0000000..f20eb46
--- /dev/null
+++ b/tests/build/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE/jni/issue40625-SL_IID_ANDROIDBUFFERQUEUESOURCE.c
@@ -0,0 +1,7 @@
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+
+SLInterfaceID foo()
+{
+    return SL_IID_ANDROIDBUFFERQUEUESOURCE;
+}
diff --git a/tests/build/issue41297-atomic-64bit/jni/Application.mk b/tests/build/issue41297-atomic-64bit/jni/Application.mk
index 5186152..b0a7813 100644
--- a/tests/build/issue41297-atomic-64bit/jni/Application.mk
+++ b/tests/build/issue41297-atomic-64bit/jni/Application.mk
@@ -1 +1,2 @@
+# Note that MIPS doesn't support 64-bit atomic built-in yet
 APP_ABI := armeabi-v7a x86
diff --git a/tests/build/issue65705-asm-pc/test_config.py b/tests/build/issue65705-asm-pc/test_config.py
index 1db330b..84e726b 100644
--- a/tests/build/issue65705-asm-pc/test_config.py
+++ b/tests/build/issue65705-asm-pc/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
diff --git a/tests/build/libc_no_atexit/build.sh b/tests/build/libc_no_atexit/build.sh
new file mode 100755
index 0000000..f18099a
--- /dev/null
+++ b/tests/build/libc_no_atexit/build.sh
@@ -0,0 +1,43 @@
+# Check that the libc.so for all platforms and all architectures
+# does not export 'atexit' and '__dso_handle' symbols.
+#
+export ANDROID_NDK_ROOT=$NDK
+
+NDK_BUILDTOOLS_PATH=$NDK/build/tools
+. $NDK/build/tools/prebuilt-common.sh
+echo DEFAULT_ARCHS=$DEFAULT_ARCHS
+
+LIBRARIES=
+for ARCH in $DEFAULT_ARCHS; do
+  LIB=$(cd $NDK && find platforms -name "libc.so" | sed -e 's!^!'$NDK'/!' | grep arch-$ARCH)
+  LIBRARIES=$LIBRARIES" $LIB"
+done
+
+FAILURE=
+COUNT=0
+for LIB in $LIBRARIES; do
+  COUNT=$(( $COUNT + 1 ))
+  echo "Checking: $LIB"
+  readelf -s $LIB | grep -q -F " atexit"
+  if [ $? = 0 ]; then
+    echo "ERROR: $NDK/$LIB exposes 'atexit'!" >&2
+    FAILURE=true
+  fi
+  readelf -s $LIB | grep -q -F " __dso_handle"
+  if [ $? = 0 ]; then
+    echo "ERROR: $NDK/$LIB exposes '__dso_handle'!" >&2
+    FAILURE=true
+  fi
+done
+
+if [ "$COUNT" = 0 ]; then
+  echo "ERROR: Did not find any libc.so in $NDK/platforms!"
+  exit 1
+fi
+
+if [ "$FAILURE" ]; then
+  exit 1
+else
+  echo "All $COUNT libc.so are ok!"
+  exit 0
+fi
diff --git a/tests/build/libcxx_headers_no_android_support/test_config.py b/tests/build/libcxx_headers_no_android_support/test_config.py
index 42cf24e..b84c8fe 100644
--- a/tests/build/libcxx_headers_no_android_support/test_config.py
+++ b/tests/build/libcxx_headers_no_android_support/test_config.py
@@ -1,2 +1,2 @@
 def extra_cmake_flags():
-    return ["-DANDROID_PLATFORM=android-21"]
+    return ['-DANDROID_PLATFORM=android-21']
diff --git a/tests/build/build_id/__init__.py b/tests/build/link_order/__init__.py
similarity index 100%
rename from tests/build/build_id/__init__.py
rename to tests/build/link_order/__init__.py
diff --git a/tests/build/alignment_default/project/jni/Android.mk b/tests/build/link_order/project/jni/Android.mk
similarity index 86%
rename from tests/build/alignment_default/project/jni/Android.mk
rename to tests/build/link_order/project/jni/Android.mk
index 365bbad..22c955e 100644
--- a/tests/build/alignment_default/project/jni/Android.mk
+++ b/tests/build/link_order/project/jni/Android.mk
@@ -3,4 +3,5 @@
 include $(CLEAR_VARS)
 LOCAL_MODULE := foo
 LOCAL_SRC_FILES := foo.cpp
+LOCAL_LDFLAGS := -v
 include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/link_order/project/jni/Application.mk b/tests/build/link_order/project/jni/Application.mk
new file mode 100644
index 0000000..3b7baf1
--- /dev/null
+++ b/tests/build/link_order/project/jni/Application.mk
@@ -0,0 +1 @@
+APP_STL := c++_shared
diff --git a/tests/build/build_id/project/jni/foo.cpp b/tests/build/link_order/project/jni/foo.cpp
similarity index 100%
rename from tests/build/build_id/project/jni/foo.cpp
rename to tests/build/link_order/project/jni/foo.cpp
diff --git a/tests/build/link_order/test.py b/tests/build/link_order/test.py
new file mode 100644
index 0000000..d72af62
--- /dev/null
+++ b/tests/build/link_order/test.py
@@ -0,0 +1,140 @@
+#
+# Copyright (C) 2018 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Check for correct link order from ndk-build.
+"""
+import difflib
+import os
+import re
+import shlex
+import subprocess
+import sys
+
+
+def is_linked_item(arg):
+    """Returns True if the argument is an object or library to be linked."""
+    if arg.endswith('.a'):
+        return True
+    if arg.endswith('.o'):
+        return True
+    if arg.endswith('.so'):
+        return True
+    if arg.startswith('-l'):
+        return True
+    return False
+
+
+def find_link_args(link_line):
+    """Returns a list of objects and libraries in the link command."""
+    args = []
+
+    # A trivial attempt at parsing here is fine since we can assume that all
+    # our objects and libraries will not include spaces and we don't care about
+    # the rest of the arguments.
+    #
+    # Arguments could be quoted on Windows. shlex.split should be good enough:
+    # "C:/src/android-ndk-r17-beta1/build//../platforms/android-21/arch-x86_64/usr/lib/../lib64\\crtbegin_so.o"
+    skip_next = False
+    for word in shlex.split(link_line):
+        if skip_next:
+            skip_next = False
+            continue
+        if word in ('-o', '-soname', '--exclude-libs'):
+            skip_next = True
+            continue
+
+        if is_linked_item(word):
+            # Use just the base name so we can compare to an exact expected
+            # link order regardless of ABI.
+            if os.sep in word or (os.altsep and os.altsep in word):
+                word = os.path.basename(word)
+            args.append(word)
+    return args
+
+
+def check_link_order(link_line, abi, api):
+    """Determines if a given link command has the correct ordering.
+
+    Args:
+        link_line (string): The full ld command.
+
+    Returns:
+        Tuple of (success, diff). The diff will be None on success or a
+        difflib.unified_diff result with no line terminations, i.e. a generator
+        suitable for use with `' '.join()`. The diff represents the changes
+        between the expected link order and the actual link order.
+    """
+    libunwind_arg = ['libunwind.a'] if abi == 'armeabi-v7a' else []
+    android_support_arg = ['libandroid_support.a'] if api < 21 else []
+    expected = [
+        'crtbegin_so.o',
+        'foo.o',
+    ] + android_support_arg + libunwind_arg + [
+        # The most important part of this test is checking that libgcc comes
+        # *before* the shared libraries so we can be sure we're actually
+        # getting libgcc symbols rather than getting them from some shared
+        # library dependency that's re-exporting them.
+        '-lgcc',
+        '-latomic',
+        'libc++_shared.so',
+        '-lc',
+        '-lm',
+        '-lm',
+        '-lgcc',
+        '-ldl',
+        '-lc',
+        '-lgcc',
+        '-ldl',
+        'crtend_so.o',
+    ]
+    link_args = find_link_args(link_line)
+    if link_args == expected:
+        return True, None
+    return False, difflib.unified_diff(expected, link_args, lineterm='')
+
+
+def run_test(ndk_path, abi, platform, linker, build_flags):
+    """Checks clang's -v output for proper link ordering."""
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = proc.communicate()
+    out = out.decode('utf-8')
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    link_line = None
+    for line in out.splitlines():
+        if 'bin/ld' in re.sub(r'[/\\]+', '/', line):
+            if link_line is not None:
+                err_msg = 'Found duplicate link lines:\n{}\n{}'.format(
+                    link_line, line)
+                return False, err_msg
+            else:
+                link_line = line
+
+    if link_line is None:
+        return False, 'Did not find link line in out:\n{}'.format(out)
+
+    result, diff = check_link_order(link_line, abi, platform)
+    return result, '' if diff is None else os.linesep.join(diff)
diff --git a/tests/build/lld_rosegment/__init__.py b/tests/build/lld_rosegment/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/lld_rosegment/__init__.py
+++ /dev/null
diff --git a/tests/build/lld_rosegment/project/CMakeLists.txt b/tests/build/lld_rosegment/project/CMakeLists.txt
deleted file mode 100644
index 85ee4fe..0000000
--- a/tests/build/lld_rosegment/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.10)
-project(lld_rosegment)
-
-add_library(foo SHARED jni/foo.cpp)
diff --git a/tests/build/lld_rosegment/project/jni/Android.mk b/tests/build/lld_rosegment/project/jni/Android.mk
deleted file mode 100644
index 365bbad..0000000
--- a/tests/build/lld_rosegment/project/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/lld_rosegment/project/jni/foo.cpp b/tests/build/lld_rosegment/project/jni/foo.cpp
deleted file mode 100644
index 85e6cd8..0000000
--- a/tests/build/lld_rosegment/project/jni/foo.cpp
+++ /dev/null
@@ -1 +0,0 @@
-void foo() {}
diff --git a/tests/build/lld_rosegment/test.py b/tests/build/lld_rosegment/test.py
deleted file mode 100644
index b6173c0..0000000
--- a/tests/build/lld_rosegment/test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check that -Wl,--no-rosegment is used when required.
-
-https://github.com/android/ndk/issues/1196
-"""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
-    """Checks correct --no-rosegment use."""
-    # The 'riscv64' ABI has a minimum supported version of 35, so the below
-    # tests of API level 29 and 30 are ignored (the CMake files will simply
-    # reset the value to the minimum supported version). Verify that the
-    # behavior after API level 30 is retained (--no-rosegment does not appear).
-    if config.abi == "riscv64":
-        verifier = FlagVerifier(Path("project"), Path(ndk_path), config).with_api(35)
-        verifier.expect_not_flag("-Wl,--no-rosegment")
-        return verifier.verify().make_test_result_tuple()
-
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config).with_api(29)
-    verifier.expect_flag("-Wl,--no-rosegment")
-    verifier.expect_not_flag("-Wl,--rosegment")
-    result = verifier.verify()
-    if result.failed():
-        return result.make_test_result_tuple()
-
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config).with_api(30)
-    verifier.expect_not_flag("-Wl,--no-rosegment")
-    return verifier.verify().make_test_result_tuple()
diff --git a/tests/build/log2-libcxx/jni/Android.mk b/tests/build/log2-libcxx/jni/Android.mk
new file mode 100644
index 0000000..f10dc6a
--- /dev/null
+++ b/tests/build/log2-libcxx/jni/Android.mk
@@ -0,0 +1,10 @@
+LOCAL_PATH := $(call my-dir)
+
+# The intention of this test is to ensure that we get the include ordering right
+# for libc++ -> libandroid_support -> libc, and to make sure we're actually
+# linking libandroid_support with libc++_shared.
+# http://b.android.com/212634
+include $(CLEAR_VARS)
+LOCAL_MODULE := log2_test
+LOCAL_SRC_FILES := log2_test.cpp
+include $(BUILD_EXECUTABLE)
diff --git a/tests/build/log2-libcxx/jni/Application.mk b/tests/build/log2-libcxx/jni/Application.mk
new file mode 100644
index 0000000..3b7baf1
--- /dev/null
+++ b/tests/build/log2-libcxx/jni/Application.mk
@@ -0,0 +1 @@
+APP_STL := c++_shared
diff --git a/tests/build/log2-libcxx/jni/log2_test.cpp b/tests/build/log2-libcxx/jni/log2_test.cpp
new file mode 100644
index 0000000..d5aa026
--- /dev/null
+++ b/tests/build/log2-libcxx/jni/log2_test.cpp
@@ -0,0 +1,6 @@
+#include <cmath>
+#include <iostream>
+
+int main(int, char**) {
+  std::cout << std::log2(8) << std::endl;
+}
diff --git a/tests/build/allow_missing_prebuilt/__init__.py b/tests/build/mstack-protector-guard/__init__.py
similarity index 100%
rename from tests/build/allow_missing_prebuilt/__init__.py
rename to tests/build/mstack-protector-guard/__init__.py
diff --git a/tests/build/alignment_compat/project/jni/Android.mk b/tests/build/mstack-protector-guard/project/jni/Android.mk
similarity index 100%
rename from tests/build/alignment_compat/project/jni/Android.mk
rename to tests/build/mstack-protector-guard/project/jni/Android.mk
diff --git a/tests/build/branch-protection/project/jni/foo.cpp b/tests/build/mstack-protector-guard/project/jni/foo.cpp
similarity index 100%
rename from tests/build/branch-protection/project/jni/foo.cpp
rename to tests/build/mstack-protector-guard/project/jni/foo.cpp
diff --git a/tests/build/mstack-protector-guard/test.py b/tests/build/mstack-protector-guard/test.py
new file mode 100644
index 0000000..6285c28
--- /dev/null
+++ b/tests/build/mstack-protector-guard/test.py
@@ -0,0 +1,53 @@
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Check for mstack-protector-guard=global when targeting old x86 targets.
+
+https://gcc.gnu.org/ml/gcc/2015-11/msg00060.html changed the default for this
+from using a global to using the TLS slot. As noted in
+https://github.com/android-ndk/ndk/issues/297 (and in that commit), this is not
+compatible with pre-4.2 devices, so we need to guard against that in the NDK.
+"""
+import os
+import subprocess
+import sys
+
+
+def run_test(ndk_path, abi, platform, linker, build_flags):
+    """Checks ndk-build V=1 output for mstackrealign flag."""
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
+        'V=1',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = proc.communicate()
+    out = out.decode('utf-8')
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    search_text = '-mstack-protector-guard=global'
+    out_words = out.split(' ')
+    if search_text in out_words:
+        print('Found unexpected {} in output:\n{}'.format(search_text, out))
+        return False, out
+    else:
+        return True, out
diff --git a/tests/build/mstackrealign/test.py b/tests/build/mstackrealign/test.py
index 67f5ed1..573d8f7 100644
--- a/tests/build/mstackrealign/test.py
+++ b/tests/build/mstackrealign/test.py
@@ -18,20 +18,34 @@
 http://b.android.com/222239 reports that old x86 targets have stack alignment
 issues. For these devices, verify that mstackrealign is used.
 """
-from pathlib import Path
-from typing import Optional
-
-from ndk.abis import Abi
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
+import os
+import subprocess
+import sys
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path, abi, platform, linker, build_flags):
     """Checks ndk-build V=1 output for mstackrealign flag."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    assert config.api is not None
-    if config.abi == Abi("x86") and config.api < 24:
-        verifier.expect_flag("-mstackrealign")
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
+        'V=1',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    out, _ = proc.communicate()
+    out = out.decode('utf-8')
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    out_words = out.split(' ')
+    if abi == 'x86' and platform < 24:
+        result = '-mstackrealign' in out_words
     else:
-        verifier.expect_not_flag("-mstackrealign")
-    return verifier.verify_ndk_build().make_test_result_tuple()
+        result = '-mstackrealign' not in out_words
+
+    return result, out
diff --git a/tests/build/multi-abi/README b/tests/build/multi-abi/README
index a1e68b1..228d193 100644
--- a/tests/build/multi-abi/README
+++ b/tests/build/multi-abi/README
@@ -1,6 +1,6 @@
 This test checks that we can build the same binary for multiple architectures
 in a single ndk-build invokation, i.e. when using:
 
-  APP_ABI := armeabi-v7a x86
+  APP_ABI := armeabi armeabi-v7a x86 mips
 
 This failed in NDK r6 due to a bug in the build scripts.
diff --git a/tests/build/multi-abi/jni/Application.mk b/tests/build/multi-abi/jni/Application.mk
index 5186152..4a17da3 100644
--- a/tests/build/multi-abi/jni/Application.mk
+++ b/tests/build/multi-abi/jni/Application.mk
@@ -1 +1 @@
-APP_ABI := armeabi-v7a x86
+APP_ABI := armeabi-v7a x86 mips
diff --git a/tests/build/neon-asm/test_config.py b/tests/build/neon-asm/test_config.py
index 2f139ae..3f9f9cc 100644
--- a/tests/build/neon-asm/test_config.py
+++ b/tests/build/neon-asm/test_config.py
@@ -1,8 +1,8 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
 
 
 def extra_cmake_flags():
-    return ["-DANDROID_ARM_NEON=ON"]
+    return ['-DANDROID_ARM_NEON=ON']
diff --git a/tests/build/neon_tags_have_no_effect/jni/Android.mk b/tests/build/neon_tags_have_no_effect/jni/Android.mk
deleted file mode 100644
index c065910..0000000
--- a/tests/build/neon_tags_have_no_effect/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp bar.cpp.neon
-include $(BUILD_EXECUTABLE)
diff --git a/tests/build/neon_tags_have_no_effect/jni/bar.cpp b/tests/build/neon_tags_have_no_effect/jni/bar.cpp
deleted file mode 100644
index 542242d..0000000
--- a/tests/build/neon_tags_have_no_effect/jni/bar.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-#if !__ARM_NEON__
-#error __ARM_NEON__ expected but not defined
-#endif
-
-int bar() {
-  return 0;
-}
diff --git a/tests/build/neon_tags_have_no_effect/jni/foo.cpp b/tests/build/neon_tags_have_no_effect/jni/foo.cpp
deleted file mode 100644
index b7eaef2..0000000
--- a/tests/build/neon_tags_have_no_effect/jni/foo.cpp
+++ /dev/null
@@ -1,9 +0,0 @@
-extern int bar();
-
-#if !__ARM_NEON__
-#error __ARM_NEON__ expected but not defined
-#endif
-
-int main(int, char**) {
-  return bar();
-}
diff --git a/tests/build/neon_tags_have_no_effect/test_config.py b/tests/build/neon_tags_have_no_effect/test_config.py
deleted file mode 100644
index 51c5c7e..0000000
--- a/tests/build/neon_tags_have_no_effect/test_config.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from typing import Optional
-
-from ndk.abis import Abi
-from ndk.test.buildtest.case import Test
-
-
-def build_unsupported(test: Test) -> Optional[str]:
-    if test.config.abi != Abi("armeabi-v7a"):
-        return test.config.abi
-    return None
diff --git a/tests/build/no_platform_gaps/test.py b/tests/build/no_platform_gaps/test.py
index e636cac..47c8300 100644
--- a/tests/build/no_platform_gaps/test.py
+++ b/tests/build/no_platform_gaps/test.py
@@ -25,51 +25,40 @@
 meddling.
 """
 import os
-from pathlib import Path
 import subprocess
 import sys
 
-import ndk.testing.standalone_toolchain
 
-import ndk.abis
-from ndk.hosts import Host
-from ndk.test.spec import BuildConfiguration
-
-
-def build(ndk_dir: str, config: BuildConfiguration) -> tuple[bool, str]:
-    ndk_build = os.path.join(ndk_dir, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build += ".cmd"
-    project_path = "project"
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
-        "V=1",
+def build(ndk_dir, abi, platform, linker, build_flags):
+    ndk_build = os.path.join(ndk_dir, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
+        'V=1',
     ]
-    proc = subprocess.Popen(
-        [ndk_build, "-C", project_path] + ndk_args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-    )
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     out, _ = proc.communicate()
-    return proc.returncode == 0, out.decode("utf-8")
+    return proc.returncode == 0, out.decode('utf-8')
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path, abi, _platform, linker, build_flags):
     """Checks ndk-build V=1 output for correct compiler."""
     min_api = None
     max_api = None
     apis = []
-    host = Host.current().tag
-    triple = ndk.abis.arch_to_triple(ndk.abis.abi_to_arch(config.abi))
-    toolchain_dir = Path(ndk_path) / f"toolchains/llvm/prebuilt/{host}"
-    lib_dir = toolchain_dir / f"sysroot/usr/lib/{triple}"
-    for path in lib_dir.iterdir():
-        if not path.is_dir():
+    for name in os.listdir(os.path.join(ndk_path, 'platforms')):
+        if not name.startswith('android-'):
             continue
 
+        _, api_str = name.split('-')
+
         try:
-            api = int(path.name)
+            api = int(api_str)
         except ValueError:
             # Must have been a lettered release. Not relevant.
             continue
@@ -80,13 +69,10 @@
         if max_api is None or api > max_api:
             max_api = api
 
-    if min_api is None or max_api is None:
-        return False, "Found no platforms"
-
     missing_platforms = sorted(list(set(range(min_api, max_api)) - set(apis)))
     for api in missing_platforms:
-        result, out = build(ndk_path, config)
+        result, out = build(ndk_path, abi, api, linker, build_flags)
         if not result:
             return result, out
 
-    return True, ""
+    return True, ''
diff --git a/tests/build/no_undefined_version/project/CMakeLists.txt b/tests/build/no_undefined_version/project/CMakeLists.txt
deleted file mode 100644
index bbd6ddb..0000000
--- a/tests/build/no_undefined_version/project/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project("--no-undefined-version test" CXX)
-
-add_library(foo SHARED jni/foo.cpp)
diff --git a/tests/build/no_undefined_version/project/jni/Android.mk b/tests/build/no_undefined_version/project/jni/Android.mk
deleted file mode 100644
index 365bbad..0000000
--- a/tests/build/no_undefined_version/project/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/no_undefined_version/project/jni/foo.cpp b/tests/build/no_undefined_version/project/jni/foo.cpp
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/no_undefined_version/project/jni/foo.cpp
+++ /dev/null
diff --git a/tests/build/no_undefined_version/test.py b/tests/build/no_undefined_version/test.py
deleted file mode 100644
index bac11fd..0000000
--- a/tests/build/no_undefined_version/test.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check that -Wl,--no-undefined-version is used.
-
-Without this flag, LLD will not verify that the public symbols in a version script are
-present in the library.
-"""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
-    """Checks correct --no-undefined-version use."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.expect_flag("-Wl,--no-undefined-version")
-    result = verifier.verify()
-    if result.failed():
-        return result.make_test_result_tuple()
-
-    # LOCAL_* flags shouldn't normally be specified on the command-line, but per module
-    # in the Android.mk. It's unusual, but doing it this way lets us avoid duplicating
-    # the test.
-    verifier = (
-        FlagVerifier(Path("project"), Path(ndk_path), config)
-        .with_cmake_flag("-DANDROID_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS=ON")
-        .with_ndk_build_flag("LOCAL_ALLOW_UNDEFINED_VERSION_SCRIPT_SYMBOLS=true")
-    )
-    verifier.expect_not_flag("-Wl,--no-undefined-version")
-    return verifier.verify().make_test_result_tuple()
diff --git a/tests/build/non_android_mk_build_script/jni/Application.mk b/tests/build/non_android_mk_build_script/jni/Application.mk
deleted file mode 100644
index ce09535..0000000
--- a/tests/build/non_android_mk_build_script/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_STL := c++_static
diff --git a/tests/build/non_android_mk_build_script/jni/foo.cpp b/tests/build/non_android_mk_build_script/jni/foo.cpp
deleted file mode 100644
index e276249..0000000
--- a/tests/build/non_android_mk_build_script/jni/foo.cpp
+++ /dev/null
@@ -1,3 +0,0 @@
-int main(int argc, char** argv) {
-  return 0;
-}
diff --git a/tests/build/non_android_mk_build_script/test_config.py b/tests/build/non_android_mk_build_script/test_config.py
deleted file mode 100644
index ff31650..0000000
--- a/tests/build/non_android_mk_build_script/test_config.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from typing import List
-
-
-def extra_ndk_build_flags() -> List[str]:
-    return [
-        "APP_BUILD_SCRIPT=jni/main.mk",
-        "APP_PROJECT_PATH=null",
-        "NDK_OUT=obj",
-        "NDK_LIBS_OUT=libs",
-    ]
diff --git a/tests/build/page_size_compat/CMakeLists.txt b/tests/build/page_size_compat/CMakeLists.txt
deleted file mode 100644
index 7e7df62..0000000
--- a/tests/build/page_size_compat/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(PageSize LANGUAGES CXX)
-
-add_library(foo STATIC jni/foo.cpp)
diff --git a/tests/build/page_size_compat/jni/Application.mk b/tests/build/page_size_compat/jni/Application.mk
deleted file mode 100644
index ee2e673..0000000
--- a/tests/build/page_size_compat/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_SUPPORT_FLEXIBLE_PAGE_SIZES := true
diff --git a/tests/build/page_size_compat/jni/foo.cpp b/tests/build/page_size_compat/jni/foo.cpp
deleted file mode 100644
index 7b404e1..0000000
--- a/tests/build/page_size_compat/jni/foo.cpp
+++ /dev/null
@@ -1,5 +0,0 @@
-#include <sys/user.h>
-
-#if defined(PAGE_SIZE)
-#error "PAGE_SIZE is defined and should not be"
-#endif
diff --git a/tests/build/page_size_compat/test_config.py b/tests/build/page_size_compat/test_config.py
deleted file mode 100644
index 1f934f1..0000000
--- a/tests/build/page_size_compat/test_config.py
+++ /dev/null
@@ -1,2 +0,0 @@
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON"]
diff --git a/tests/build/page_size_default/CMakeLists.txt b/tests/build/page_size_default/CMakeLists.txt
deleted file mode 100644
index 7e7df62..0000000
--- a/tests/build/page_size_default/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(PageSize LANGUAGES CXX)
-
-add_library(foo STATIC jni/foo.cpp)
diff --git a/tests/build/page_size_default/jni/Android.mk b/tests/build/page_size_default/jni/Android.mk
deleted file mode 100644
index 7ce18d5..0000000
--- a/tests/build/page_size_default/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_STATIC_LIBRARY)
diff --git a/tests/build/page_size_default/jni/foo.cpp b/tests/build/page_size_default/jni/foo.cpp
deleted file mode 100644
index 96d34a6..0000000
--- a/tests/build/page_size_default/jni/foo.cpp
+++ /dev/null
@@ -1,5 +0,0 @@
-#include <sys/user.h>
-
-#if !defined(PAGE_SIZE)
-#error "PAGE_SIZE is not defined"
-#endif
diff --git a/tests/build/prebuilt-copy/build.sh b/tests/build/prebuilt-copy/build.sh
index 7c099ac..b666ce8 100755
--- a/tests/build/prebuilt-copy/build.sh
+++ b/tests/build/prebuilt-copy/build.sh
@@ -33,7 +33,7 @@
   done
 
   if [ -z "$ABIS" ]; then
-    ABIS="armeabi-v7a x86"
+    ABIS="armeabi-v7a x86 mips"
   fi
 fi
 
diff --git a/tests/build/project-properties/default.properties b/tests/build/project-properties/default.properties
new file mode 100644
index 0000000..a2f6cb9
--- /dev/null
+++ b/tests/build/project-properties/default.properties
@@ -0,0 +1,3 @@
+# This value should be ignored because project.properties
+# exists and provides a different one.
+target=android-5
diff --git a/tests/build/project-properties/jni/Android.mk b/tests/build/project-properties/jni/Android.mk
new file mode 100644
index 0000000..e53c7b9
--- /dev/null
+++ b/tests/build/project-properties/jni/Android.mk
@@ -0,0 +1,14 @@
+WANTED_PLATFORM_32 := android-16
+WANTED_PLATFORM_64 := android-21
+
+ifeq (,$(filter %64, $(TARGET_ARCH)))
+  ifneq ($(TARGET_PLATFORM),$(WANTED_PLATFORM_32))
+    $(error Incorrect target platform: $(TARGET_PLATFORM) (expecteding $(WANTED_PLATFORM_32)))
+  endif
+  $(call ndk_log,Test OK: Correct target platform retrieved from project.properties: $(TARGET_PLATFORM))
+else
+  ifneq ($(TARGET_PLATFORM),$(WANTED_PLATFORM_64))
+    $(error Incorrect target platform: $(TARGET_PLATFORM) (expecteding $(WANTED_PLATFORM_64)))
+  endif
+  $(call ndk_log,Test OK: Correct target platform retrieved from project.properties but auto-adjusted: $(TARGET_PLATFORM))
+endif
diff --git a/tests/build/project-properties/jni/Application.mk b/tests/build/project-properties/jni/Application.mk
new file mode 100644
index 0000000..a252a72
--- /dev/null
+++ b/tests/build/project-properties/jni/Application.mk
@@ -0,0 +1 @@
+APP_ABI := all
diff --git a/tests/build/project-properties/project.properties b/tests/build/project-properties/project.properties
new file mode 100644
index 0000000..ab5ac37
--- /dev/null
+++ b/tests/build/project-properties/project.properties
@@ -0,0 +1,3 @@
+# This is the value that should be kept by the build system
+# Instead of the one under default.properties
+target=android-9
diff --git a/tests/build/rs-hello-compute/jni/Android.mk b/tests/build/rs-hello-compute/jni/Android.mk
new file mode 100644
index 0000000..6b6ecd5
--- /dev/null
+++ b/tests/build/rs-hello-compute/jni/Android.mk
@@ -0,0 +1,29 @@
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# This is the shared library included by the JNI test app.
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := hellocomputendk
+LOCAL_SRC_FILES := helloComputeNDK.cpp mono.rs
+LOCAL_LDLIBS := -ljnigraphics -llog
+LOCAL_RENDERSCRIPT_COMPATIBILITY := true
+LOCAL_SHARED_LIBRARIES := RSSupport blasV8
+LOCAL_STATIC_LIBRARIES := RScpp_static
+include $(BUILD_SHARED_LIBRARY)
+
+$(call import-module,android/renderscript)
diff --git a/build/cmake/hooks/post/Android.cmake b/tests/build/rs-hello-compute/jni/Application.mk
similarity index 77%
copy from build/cmake/hooks/post/Android.cmake
copy to tests/build/rs-hello-compute/jni/Application.mk
index 523f4e8..29d2081 100644
--- a/build/cmake/hooks/post/Android.cmake
+++ b/tests/build/rs-hello-compute/jni/Application.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2016 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,5 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# This is a hook file that will be included by cmake at the end of
-# Modules/Platform/Android.cmake.
+APP_PLATFORM := android-19
+APP_STL := c++_shared
diff --git a/tests/build/rs-hello-compute/jni/helloComputeNDK.cpp b/tests/build/rs-hello-compute/jni/helloComputeNDK.cpp
new file mode 100644
index 0000000..a3d59ea
--- /dev/null
+++ b/tests/build/rs-hello-compute/jni/helloComputeNDK.cpp
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <jni.h>
+#include <android/log.h>
+#include <android/bitmap.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+
+#include "RenderScript.h"
+
+#include "ScriptC_mono.h"
+
+#define  LOG_TAG    "HelloComputeNDK"
+#define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
+#define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
+
+using namespace android::RSC;
+
+extern "C" JNIEXPORT void JNICALL
+Java_com_example_android_rs_hellocomputendk_HelloComputeNDK_nativeMono(JNIEnv * env,
+                                                                       jclass,
+                                                                       jstring pathObj,
+                                                                       jint X,
+                                                                       jint Y,
+                                                                       jobject jbitmapIn,
+                                                                       jobject jbitmapOut
+                                                                       )
+{
+    void* inputPtr = nullptr;
+    void* outputPtr = nullptr;
+
+    AndroidBitmap_lockPixels(env, jbitmapIn, &inputPtr);
+    AndroidBitmap_lockPixels(env, jbitmapOut, &outputPtr);
+
+    const char * path = env->GetStringUTFChars(pathObj, nullptr);
+    sp<RS> rs = new RS();
+    rs->init(path);
+    env->ReleaseStringUTFChars(pathObj, path);
+
+    sp<const Element> e = Element::RGBA_8888(rs);
+
+    sp<const Type> t = Type::create(rs, e, X, Y, 0);
+
+    sp<Allocation> inputAlloc = Allocation::createTyped(rs, t, RS_ALLOCATION_MIPMAP_NONE,
+                                                        RS_ALLOCATION_USAGE_SHARED | RS_ALLOCATION_USAGE_SCRIPT,
+                                                        inputPtr);
+    sp<Allocation> outputAlloc = Allocation::createTyped(rs, t, RS_ALLOCATION_MIPMAP_NONE,
+                                                         RS_ALLOCATION_USAGE_SHARED | RS_ALLOCATION_USAGE_SCRIPT,
+                                                         outputPtr);
+
+
+    inputAlloc->copy2DRangeFrom(0, 0, X, Y, inputPtr);
+    ScriptC_mono* sc = new ScriptC_mono(rs);
+    sc->forEach_root(inputAlloc, outputAlloc);
+    outputAlloc->copy2DRangeTo(0, 0, X, Y, outputPtr);
+
+    AndroidBitmap_unlockPixels(env, jbitmapIn);
+    AndroidBitmap_unlockPixels(env, jbitmapOut);
+
+}
diff --git a/tests/build/rs-hello-compute/jni/mono.rs b/tests/build/rs-hello-compute/jni/mono.rs
new file mode 100644
index 0000000..61f9d21
--- /dev/null
+++ b/tests/build/rs-hello-compute/jni/mono.rs
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.example.android.rs.hellocomputendk)
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+void root(const uchar4 *v_in, uchar4 *v_out) {
+    float4 f4 = rsUnpackColor8888(*v_in);
+
+    float3 mono = dot(f4.rgb, gMonoMult) + 0.01f;
+    *v_out = rsPackColorTo8888(mono);
+}
+
diff --git a/tests/build/shell_quotation/__init__.py b/tests/build/shell_quotation/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/shell_quotation/__init__.py
+++ /dev/null
diff --git a/tests/build/shell_quotation/project/jni/Android.mk b/tests/build/shell_quotation/project/jni/Android.mk
deleted file mode 100644
index b65da37..0000000
--- a/tests/build/shell_quotation/project/jni/Android.mk
+++ /dev/null
@@ -1,13 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-LOCAL_SHORT_COMMANDS := false
-include $(BUILD_SHARED_LIBRARY)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo_short_local
-LOCAL_SRC_FILES := foo.cpp
-LOCAL_SHORT_COMMANDS := true
-include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/shell_quotation/project/jni/foo.cpp b/tests/build/shell_quotation/project/jni/foo.cpp
deleted file mode 100644
index 85e6cd8..0000000
--- a/tests/build/shell_quotation/project/jni/foo.cpp
+++ /dev/null
@@ -1 +0,0 @@
-void foo() {}
diff --git a/tests/build/shell_quotation/test.py b/tests/build/shell_quotation/test.py
deleted file mode 100644
index f3e6ed2..0000000
--- a/tests/build/shell_quotation/test.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#
-# Copyright (C) 2022 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Check for correct addition of shell quotes around fragile arguments.
-"""
-import json
-import os
-import subprocess
-import sys
-import textwrap
-
-from ndk.test.spec import BuildConfiguration
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
-    """Checks that shell quotations are applied to a fragile argument."""
-    ndk_build = os.path.join(ndk_path, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build += ".cmd"
-    project_path = "project"
-    fragile_flag = '-Dfooyoo="a + b"'
-    fragile_argument = "APP_CFLAGS+=" + fragile_flag
-    quoted_fragile_flag = "'-Dfooyoo=a + b'"
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
-        fragile_argument,
-        "-B",
-        "compile_commands.json",
-    ]
-    proc = subprocess.Popen(
-        [ndk_build, "-C", project_path] + ndk_args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
-    out, _ = proc.communicate()
-    if proc.returncode != 0:
-        return proc.returncode == 0, out
-
-    cc_json = os.path.join(project_path, "compile_commands.json")
-    if not os.path.exists(cc_json):
-        return False, "{} does not exist".format(cc_json)
-
-    with open(cc_json, encoding="utf-8") as cc_json_file:
-        contents = json.load(cc_json_file)
-    command_default = contents[0]["command"]
-    command_short_local = contents[1]["command"]
-    if not quoted_fragile_flag in command_default:
-        return False, textwrap.dedent(
-            f"""\
-            {config.abi} compile_commands.json file had wrong contents for default command:
-            Expected to contain: {quoted_fragile_flag}
-            Actual: {command_default}"""
-        )
-    if not fragile_flag in command_short_local:
-        return False, textwrap.dedent(
-            f"""\
-            {config.abi} compile_commands.json file had wrong contents for short-local command:
-            Expected to contain: {fragile_flag}
-            Actual: {command_short_local}"""
-        )
-
-    return True, ""
diff --git a/tests/build/short-commands-escape/test_config.py b/tests/build/short-commands-escape/test_config.py
index 227d415..ad013d7 100644
--- a/tests/build/short-commands-escape/test_config.py
+++ b/tests/build/short-commands-escape/test_config.py
@@ -2,11 +2,11 @@
 import sys
 
 
-def build_unsupported(_test):
-    if sys.platform != "win32":
+def build_unsupported(test):
+    if sys.platform != 'win32':
         return sys.platform
     return None
 
 
 def extra_ndk_build_flags():
-    return ["NDK_OUT=foo\\bar"]
+    return ['NDK_OUT=foo\\bar']
diff --git a/tests/build/signal/test_config.py b/tests/build/signal/test_config.py
index fa599e6..e9d5f66 100644
--- a/tests/build/signal/test_config.py
+++ b/tests/build/signal/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi in ("arm64-v8a", "riscv64", "x86_64"):
+    if test.config.abi in ('arm64-v8a', 'x86_64'):
         return test.config.abi
     return None
diff --git a/tests/build/ssax-instructions/jni/Android.mk b/tests/build/ssax-instructions/jni/Android.mk
index b966fc4..40aa6e7 100644
--- a/tests/build/ssax-instructions/jni/Android.mk
+++ b/tests/build/ssax-instructions/jni/Android.mk
@@ -4,4 +4,5 @@
 LOCAL_MODULE := ssax_instruction
 LOCAL_ARM_NEON := true
 LOCAL_SRC_FILES := test.S
+LOCAL_ASFLAGS := -fno-integrated-as
 include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/ssax-instructions/jni/test.S b/tests/build/ssax-instructions/jni/test.S
index cac3659..7dc04e8 100644
--- a/tests/build/ssax-instructions/jni/test.S
+++ b/tests/build/ssax-instructions/jni/test.S
@@ -1,5 +1,10 @@
 // This assembler file contains instructions like ssax
-// that were not assembled properly with NDKr5b's gas version.
+// that were not assembler properly with NDKr5b's gas version.
+//
+
+///////        unittest.s  ////////
+@ For little endian
+@.arch armv7a
 
 .align  2
 armSP_FFTFwd_CToC_SC16_Radix4_fs_OutOfPlace_unsafe:
@@ -15,11 +20,11 @@
 add     r3, r3, ip
 rsb     r3, r3, #8      @ 0x8
 str     r1, [sp, #20]
-ldrd    r4, r5, [r0], ip
-ldrd    r6, r7, [r0], ip
-ldrd    r8, r9, [r0], ip
-ldrd    sl, fp, [r0], r3
-strd    r2, r3, [sp, #8]
+ldrd    r4, [r0], ip
+ldrd    r6, [r0], ip
+ldrd    r8, [r0], ip
+ldrd    sl, [r0], r3
+strd    r2, [sp, #8]
 subs    lr, lr, #2      @ 0x2
 str     lr, [sp, #24]
 sadd16  r2, r4, r8
@@ -39,10 +44,10 @@
 ldrd    r2, [sp, #8]
 ssax    r7, r1, lr
 sasx    fp, r1, lr
-strd    r4, r5, [r2], ip
-strd    r6, r7, [r2], ip
-strd    r8, r9, [r2], ip
-strd    sl, fp, [r2], r3
+strd    r4, [r2], ip
+strd    r6, [r2], ip
+strd    r8, [r2], ip
+strd    sl, [r2], r3
 ldr     lr, [sp, #24]
 bgt     armSP_FFTFwd_CToC_SC16_Radix4_fs_OutOfPlace_unsafe+0x30
 ldrd    r6, [sp]
diff --git a/tests/build/ssax-instructions/test_config.py b/tests/build/ssax-instructions/test_config.py
index 1db330b..84e726b 100644
--- a/tests/build/ssax-instructions/test_config.py
+++ b/tests/build/ssax-instructions/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
diff --git a/tests/build/standalone_toolchain/test.py b/tests/build/standalone_toolchain/test.py
index 3a911f8..29d4a4f 100644
--- a/tests/build/standalone_toolchain/test.py
+++ b/tests/build/standalone_toolchain/test.py
@@ -13,11 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from ndk.test.spec import BuildConfiguration
 import ndk.testing.standalone_toolchain
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path, abi, api, linker, _build_flags):
     return ndk.testing.standalone_toolchain.run_test(
-        ndk_path, config, "foo.cpp", ["--stl=libc++"], []
-    )
+        ndk_path, abi, api, linker, 'foo.cpp', ['--stl=libc++'], [])
diff --git a/tests/build/standalone_toolchainapi_too_low/__init__.py b/tests/build/standalone_toolchain_no_android_support/__init__.py
similarity index 100%
rename from tests/build/standalone_toolchainapi_too_low/__init__.py
rename to tests/build/standalone_toolchain_no_android_support/__init__.py
diff --git a/tests/build/standalone_toolchainapi_too_low/foo.cpp b/tests/build/standalone_toolchain_no_android_support/foo.cpp
similarity index 100%
rename from tests/build/standalone_toolchainapi_too_low/foo.cpp
rename to tests/build/standalone_toolchain_no_android_support/foo.cpp
diff --git a/build/cmake/hooks/pre/Android-Clang.cmake b/tests/build/standalone_toolchain_no_android_support/test.py
similarity index 63%
copy from build/cmake/hooks/pre/Android-Clang.cmake
copy to tests/build/standalone_toolchain_no_android_support/test.py
index 929a37b..2ff8536 100644
--- a/build/cmake/hooks/pre/Android-Clang.cmake
+++ b/tests/build/standalone_toolchain_no_android_support/test.py
@@ -1,4 +1,5 @@
-# Copyright (C) 2020 The Android Open Source Project
+#
+# Copyright (C) 2017 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,6 +12,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
+import ndk.testing.standalone_toolchain
 
-# This is a hook file that will be included by cmake at the beginning of
-# Modules/Platform/Android-Clang.cmake.
+
+def run_test(ndk_path, abi, _api, linker, _build_flags):
+    return ndk.testing.standalone_toolchain.run_test(ndk_path, abi, 21, linker,
+                                                     'foo.cpp', [], [])
diff --git a/tests/build/standalone_toolchain_thumb/test.py b/tests/build/standalone_toolchain_thumb/test.py
index 3060df7..b0fc58e 100644
--- a/tests/build/standalone_toolchain_thumb/test.py
+++ b/tests/build/standalone_toolchain_thumb/test.py
@@ -13,11 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-from ndk.test.spec import BuildConfiguration
 import ndk.testing.standalone_toolchain
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path, abi, api, linker, _build_flags):
     return ndk.testing.standalone_toolchain.run_test(
-        ndk_path, config, "foo.cpp", ["--stl=libc++"], ["-mthumb"]
-    )
+        ndk_path, abi, api, linker, 'foo.cpp', ['--stl=libc++'], ['-mthumb'])
diff --git a/tests/build/standalone_toolchain_thumb/test_config.py b/tests/build/standalone_toolchain_thumb/test_config.py
index 096d881..12c5785 100644
--- a/tests/build/standalone_toolchain_thumb/test_config.py
+++ b/tests/build/standalone_toolchain_thumb/test_config.py
@@ -4,6 +4,6 @@
 
 def build_unsupported(test):
     # -mthumb is only relevant for 32-bit ARM.
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
diff --git a/tests/build/standalone_toolchainapi_too_low/test.py b/tests/build/standalone_toolchainapi_too_low/test.py
deleted file mode 100644
index dd0a26c..0000000
--- a/tests/build/standalone_toolchainapi_too_low/test.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#
-# Copyright (C) 2024 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from ndk.test.spec import BuildConfiguration
-import ndk.testing.standalone_toolchain
-import ndk.abis
-
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
-    min_api_for_abi = ndk.abis.min_api_for_abi(config.abi)
-    arch = ndk.abis.abi_to_arch(config.abi)
-    success, out = ndk.testing.standalone_toolchain.run_test(
-        ndk_path, config, "foo.cpp", ["--api", str(min_api_for_abi - 1)], []
-    )
-    if success:
-        return (
-            False,
-            f"{min_api_for_abi} is below minimum supported OS version for "
-            f"{config.abi}, but was not rejected",
-        )
-    expected_error = (
-        f"{min_api_for_abi - 1} is less than minimum platform for {arch} "
-        f"({min_api_for_abi})"
-    )
-    if expected_error not in out:
-        return (
-            False,
-            f'expected error message ("{expected_error}") not seen in output: {out}',
-        )
-    return True, out
diff --git a/tests/build/static_cxx_linkable/CMakeLists.txt b/tests/build/static_cxx_linkable/CMakeLists.txt
deleted file mode 100644
index 1d653f0..0000000
--- a/tests/build/static_cxx_linkable/CMakeLists.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-cmake_minimum_required(VERSION 3.6.0)
-
-add_executable(foo jni/foo.cpp)
diff --git a/tests/build/static_cxx_linkable/__init__.py b/tests/build/static_cxx_linkable/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/static_cxx_linkable/__init__.py
+++ /dev/null
diff --git a/tests/build/static_cxx_linkable/jni/Android.mk b/tests/build/static_cxx_linkable/jni/Android.mk
deleted file mode 100644
index d9d1555..0000000
--- a/tests/build/static_cxx_linkable/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_EXECUTABLE)
diff --git a/tests/build/static_cxx_linkable/jni/foo.cpp b/tests/build/static_cxx_linkable/jni/foo.cpp
deleted file mode 100644
index 90a8c01..0000000
--- a/tests/build/static_cxx_linkable/jni/foo.cpp
+++ /dev/null
@@ -1,7 +0,0 @@
-#include <locale.h>
-
-int main(int argc, char** argv) {
-  locale_t locale = newlocale(LC_ALL, "tr_TR", static_cast<locale_t>(0));
-  freelocale(locale);
-  return 0;
-}
diff --git a/tests/build/strip/__init__.py b/tests/build/strip/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/strip/__init__.py
+++ /dev/null
diff --git a/tests/build/strip/test.py b/tests/build/strip/test.py
index 6633618..af41ffc 100644
--- a/tests/build/strip/test.py
+++ b/tests/build/strip/test.py
@@ -14,15 +14,33 @@
 # limitations under the License.
 #
 """Check for strip --strip-unneeded use."""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
+import os
+import subprocess
+import sys
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path, abi, api, linker, build_flags):
     """Checks ndk-build V=1 output for --strip-unneeded flag."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.expect_flag("--strip-unneeded")
-    return verifier.verify_ndk_build().make_test_result_tuple()
+    if build_flags is None:
+        build_flags = []
+
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{api}',
+        'V=1',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                            encoding='utf-8')
+    out, _ = proc.communicate()
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    out_words = out.split(' ')
+    return '--strip-unneeded' in out_words, out
diff --git a/tests/build/strip_keep_symbols/__init__.py b/tests/build/strip_keep_symbols/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/strip_keep_symbols/__init__.py
+++ /dev/null
diff --git a/tests/build/strip_keep_symbols/test.py b/tests/build/strip_keep_symbols/test.py
index 9ff354e..14bc107 100644
--- a/tests/build/strip_keep_symbols/test.py
+++ b/tests/build/strip_keep_symbols/test.py
@@ -14,16 +14,36 @@
 # limitations under the License.
 #
 """Check for strip --strip-debug use."""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
+import os
+import subprocess
+import sys
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path, abi, api, linker, build_flags):
     """Checks ndk-build V=1 output for --strip-debug flag."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.expect_flag("--strip-debug")
-    verifier.expect_not_flag("--strip-unneeded")
-    return verifier.verify_ndk_build().make_test_result_tuple()
+    if build_flags is None:
+        build_flags = []
+
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{api}',
+        'V=1',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                            encoding='utf-8')
+    out, _ = proc.communicate()
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    out_words = out.split(' ')
+    result = False
+    if '--strip-debug' in out_words and '--strip-unneeded' not in out_words:
+        result = True
+    return result, out
diff --git a/tests/build/strip_keep_symbols_app/__init__.py b/tests/build/strip_keep_symbols_app/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/strip_keep_symbols_app/__init__.py
+++ /dev/null
diff --git a/tests/build/strip_keep_symbols_app/test.py b/tests/build/strip_keep_symbols_app/test.py
index 9ff354e..14bc107 100644
--- a/tests/build/strip_keep_symbols_app/test.py
+++ b/tests/build/strip_keep_symbols_app/test.py
@@ -14,16 +14,36 @@
 # limitations under the License.
 #
 """Check for strip --strip-debug use."""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
+import os
+import subprocess
+import sys
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path, abi, api, linker, build_flags):
     """Checks ndk-build V=1 output for --strip-debug flag."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.expect_flag("--strip-debug")
-    verifier.expect_not_flag("--strip-unneeded")
-    return verifier.verify_ndk_build().make_test_result_tuple()
+    if build_flags is None:
+        build_flags = []
+
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{api}',
+        'V=1',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                            encoding='utf-8')
+    out, _ = proc.communicate()
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    out_words = out.split(' ')
+    result = False
+    if '--strip-debug' in out_words and '--strip-unneeded' not in out_words:
+        result = True
+    return result, out
diff --git a/tests/build/strip_local_overrides_app/__init__.py b/tests/build/strip_local_overrides_app/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/strip_local_overrides_app/__init__.py
+++ /dev/null
diff --git a/tests/build/strip_local_overrides_app/test.py b/tests/build/strip_local_overrides_app/test.py
index 4c2df34..af41ffc 100644
--- a/tests/build/strip_local_overrides_app/test.py
+++ b/tests/build/strip_local_overrides_app/test.py
@@ -14,16 +14,33 @@
 # limitations under the License.
 #
 """Check for strip --strip-unneeded use."""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
+import os
+import subprocess
+import sys
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
+def run_test(ndk_path, abi, api, linker, build_flags):
     """Checks ndk-build V=1 output for --strip-unneeded flag."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    verifier.expect_not_flag("--strip-debug")
-    verifier.expect_flag("--strip-unneeded")
-    return verifier.verify_ndk_build().make_test_result_tuple()
+    if build_flags is None:
+        build_flags = []
+
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{api}',
+        'V=1',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                            encoding='utf-8')
+    out, _ = proc.communicate()
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    out_words = out.split(' ')
+    return '--strip-unneeded' in out_words, out
diff --git a/tests/build/strip_none/__init__.py b/tests/build/strip_none/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/build/strip_none/__init__.py
+++ /dev/null
diff --git a/tests/build/strip_none/test.py b/tests/build/strip_none/test.py
index 49d9068..b1302e4 100644
--- a/tests/build/strip_none/test.py
+++ b/tests/build/strip_none/test.py
@@ -13,19 +13,34 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Check that strip is not used."""
-from pathlib import Path
-from typing import Optional
-
-from ndk.test.spec import BuildConfiguration
-from ndk.testing.flag_verifier import FlagVerifier
+"""Check for strip --strip-unneeded use."""
+import os
+import subprocess
+import sys
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, Optional[str]]:
-    """Checks ndk-build V=1 output for lack of strip."""
-    verifier = FlagVerifier(Path("project"), Path(ndk_path), config)
-    # TODO: Fix this test.
-    # This test has always been wrong, since it was only doing whole word
-    # search for 'strip' and we call strip with its full path.
-    verifier.expect_not_flag("strip")
-    return verifier.verify_ndk_build().make_test_result_tuple()
+def run_test(ndk_path, abi, api, linker, build_flags):
+    """Checks ndk-build V=1 output for --strip-unneeded flag."""
+    if build_flags is None:
+        build_flags = []
+
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{api}',
+        'V=1',
+    ]
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
+                            encoding='utf-8')
+    out, _ = proc.communicate()
+    if proc.returncode != 0:
+        return proc.returncode == 0, out
+
+    out_words = out.split(' ')
+    return 'strip' not in out_words, out
diff --git a/tests/build/system-no-supc/test_config.py b/tests/build/system-no-supc/test_config.py
index 1543ff0..e8b1a9b 100644
--- a/tests/build/system-no-supc/test_config.py
+++ b/tests/build/system-no-supc/test_config.py
@@ -3,4 +3,4 @@
 
 
 def extra_cmake_flags():
-    return ["-DANDROID_STL=system"]
+    return ['-DANDROID_STL=system']
diff --git a/tests/build/test-inet-defs/jni/test-in.c b/tests/build/test-inet-defs/jni/test-in.c
index a145a2e..99ba3d5 100644
--- a/tests/build/test-inet-defs/jni/test-in.c
+++ b/tests/build/test-inet-defs/jni/test-in.c
@@ -19,4 +19,4 @@
 #error INET_ADDRSTRLEN is not defined by <inet/in.h>
 #endif
 
-char in;
+char dummy_in;
diff --git a/tests/build/test-inet-defs/jni/test-in6.c b/tests/build/test-inet-defs/jni/test-in6.c
index 2c0760c..32a12ad 100644
--- a/tests/build/test-inet-defs/jni/test-in6.c
+++ b/tests/build/test-inet-defs/jni/test-in6.c
@@ -27,4 +27,4 @@
 #ifndef IN6_IS_ADDR_MULTICAST
 #error IN6_IS_ADDR_MULTICAST is not defined by <inet/in6.h>
 #endif
-char in6;
+char dummy_in6;
diff --git a/tests/build/unwinder_hidden/test.py b/tests/build/unwinder_hidden/test.py
index 1451792..b0d75b6 100644
--- a/tests/build/unwinder_hidden/test.py
+++ b/tests/build/unwinder_hidden/test.py
@@ -18,10 +18,11 @@
 from pathlib import Path
 import re
 import subprocess
-from typing import Iterator
+from typing import Iterator, List, Optional, Tuple
 
+from ndk.abis import Abi
 import ndk.hosts
-from ndk.test.spec import BuildConfiguration
+from ndk.toolchains import LinkerOption
 
 
 def find_public_unwind_symbols(output: str) -> Iterator[str]:
@@ -29,56 +30,53 @@
     #   274: 00000000000223d8     8 FUNC    GLOBAL DEFAULT   11 _Unwind_GetIP
     # Group 1: Visibility
     # Group 2: Name
-    readelf_regex = re.compile(r"^.*?(\S+)\s+\d+\s+(\S+)$")
+    readelf_regex = re.compile(r'^.*?(\S+)\s+\d+\s+(\S+)$')
     for line in output.splitlines():
         match = readelf_regex.match(line)
         if match is None:
             continue
         visibility, name = match.groups()
-        if name.startswith("_Unwind") and visibility == "DEFAULT":
+        if name.startswith('_Unwind') and visibility == 'DEFAULT':
             yield name
 
 
-def readelf(ndk_path: Path, host: ndk.hosts.Host, library: Path, *args: str) -> str:
+def readelf(ndk_path: Path, host: ndk.hosts.Host, library: Path,
+            *args: str) -> str:
     """Runs readelf, returning the output."""
-    readelf_path = (
-        ndk_path
-        / "toolchains/llvm/prebuilt"
-        / ndk.hosts.get_host_tag()
-        / "bin/llvm-readelf"
-    )
+    readelf_path = (ndk_path / 'toolchains/llvm/prebuilt' /
+                    ndk.hosts.get_host_tag(str(ndk_path)) / 'bin/llvm-readelf')
     if host.is_windows:
-        readelf_path = readelf_path.with_suffix(".exe")
+        readelf_path = readelf_path.with_suffix('.exe')
 
     return subprocess.run(
         [str(readelf_path), *args, str(library)],
         check=True,
-        encoding="utf-8",
+        encoding='utf-8',
         stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-    ).stdout
+        stderr=subprocess.STDOUT).stdout
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path: str, abi: Abi, platform: Optional[int],
+             linker: LinkerOption, build_flags: List[str]) -> Tuple[bool, str]:
     """Check that unwinder symbols are hidden in outputs."""
-    ndk_build = Path(ndk_path) / "ndk-build"
+    ndk_build = Path(ndk_path) / 'ndk-build'
     host = ndk.hosts.get_default_host()
     if host.is_windows:
-        ndk_build = ndk_build.with_suffix(".cmd")
-    project_path = Path("project")
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
+        ndk_build = ndk_build.with_suffix('.cmd')
+    project_path = Path('project')
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
     ]
     subprocess.run(
-        [str(ndk_build), "-C", str(project_path)] + ndk_args,
+        [str(ndk_build), '-C', str(project_path)] + ndk_args,
         check=True,
         stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-    )
+        stderr=subprocess.STDOUT)
 
-    library = project_path / "libs" / str(config.abi) / "libfoo.so"
-    readelf_output = readelf(Path(ndk_path), host, library, "-sW")
+    library = project_path / 'libs' / str(abi) / 'libfoo.so'
+    readelf_output = readelf(Path(ndk_path), host, library, '-sW')
     for symbol in find_public_unwind_symbols(readelf_output):
-        return False, f"Found public unwind symbol: {symbol}"
-    return True, ""
+        return False, f'Found public unwind symbol: {symbol}'
+    return True, ''
diff --git a/tests/build/build_id/__init__.py b/tests/build/vulkan_layers/__init__.py
similarity index 100%
copy from tests/build/build_id/__init__.py
copy to tests/build/vulkan_layers/__init__.py
diff --git a/tests/build/vulkan_layers/jni/Android.mk b/tests/build/vulkan_layers/jni/Android.mk
new file mode 100644
index 0000000..147cf7a
--- /dev/null
+++ b/tests/build/vulkan_layers/jni/Android.mk
@@ -0,0 +1,12 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := vulkan
+LOCAL_SRC_FILES := instance.cpp
+LOCAL_CFLAGS := -std=c++11 -DVK_PROTOTYPES
+LOCAL_LDLIBS := -lvulkan
+LOCAL_STATIC_LIBRARIES := shaderc glslang
+include $(BUILD_EXECUTABLE)
+
+$(call import-module,third_party/shaderc)
+$(call import-module,third_party/vulkan/src/build-android/jni)
diff --git a/tests/build/vulkan_layers/jni/Application.mk b/tests/build/vulkan_layers/jni/Application.mk
new file mode 100644
index 0000000..3fa28b4
--- /dev/null
+++ b/tests/build/vulkan_layers/jni/Application.mk
@@ -0,0 +1,6 @@
+APP_ABI := armeabi-v7a arm64-v8a x86 x86_64 mips mips64
+APP_PLATFORM := android-24
+APP_STL := c++_shared
+APP_MODULES := layer_utils VkLayer_core_validation \
+        VkLayer_parameter_validation VkLayer_object_tracker VkLayer_threading \
+        VkLayer_unique_objects
diff --git a/tests/build/vulkan_layers/jni/instance.cpp b/tests/build/vulkan_layers/jni/instance.cpp
new file mode 100644
index 0000000..8bb7944
--- /dev/null
+++ b/tests/build/vulkan_layers/jni/instance.cpp
@@ -0,0 +1,79 @@
+/*
+ * Vulkan Samples
+ *
+ * Copyright (C) 2015-2016 Valve Corporation
+ * Copyright (C) 2015-2016 LunarG, Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
+ * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ */
+
+/*
+VULKAN_SAMPLE_SHORT_DESCRIPTION
+create and destroy Vulkan instance
+*/
+
+#include <iostream>
+#include <sstream>
+#include <vulkan/vulkan.h>
+
+#define APP_SHORT_NAME "vulkansamples_instance"
+
+int main() {
+
+    /* VULKAN_KEY_START */
+
+    // initialize the VkApplicationInfo structure
+    VkApplicationInfo app_info = {};
+    app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
+    app_info.pNext = NULL;
+    app_info.pApplicationName = APP_SHORT_NAME;
+    app_info.applicationVersion = 1;
+    app_info.pEngineName = APP_SHORT_NAME;
+    app_info.engineVersion = 1;
+    app_info.apiVersion = VK_MAKE_VERSION(1, 0, 0);
+
+    // initialize the VkInstanceCreateInfo structure
+    VkInstanceCreateInfo inst_info = {};
+    inst_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
+    inst_info.pNext = NULL;
+    inst_info.flags = 0;
+    inst_info.pApplicationInfo = &app_info;
+    inst_info.enabledExtensionCount = 0;
+    inst_info.ppEnabledExtensionNames = NULL;
+    inst_info.enabledLayerCount = 0;
+    inst_info.ppEnabledLayerNames = NULL;
+
+    VkInstance inst;
+    VkResult res;
+
+    res = vkCreateInstance(&inst_info, NULL, &inst);
+    if (res == VK_ERROR_INCOMPATIBLE_DRIVER) {
+        std::cout << "cannot find a compatible Vulkan ICD\n";
+        return(-1);
+    } else if (res) {
+        std::cout << "unknown error\n";
+        return(-1);
+    }
+
+    vkDestroyInstance(inst, NULL);
+
+    /* VULKAN_KEY_END */
+
+    return 0;
+}
diff --git a/tests/build/vulkan_layers/test_config.py b/tests/build/vulkan_layers/test_config.py
new file mode 100644
index 0000000..8e525e9
--- /dev/null
+++ b/tests/build/vulkan_layers/test_config.py
@@ -0,0 +1,10 @@
+def build_unsupported(test):
+    # Major build time regression.
+    return True
+    # pylint: disable=unreachable
+
+    # Vulkan support wasn't added until android-24
+    if test.config.api < 24:
+        return test.config.api
+
+    return None
diff --git a/tests/build/warn-execstack/jni/Android.mk b/tests/build/warn-execstack/jni/Android.mk
new file mode 100644
index 0000000..d814e2f
--- /dev/null
+++ b/tests/build/warn-execstack/jni/Android.mk
@@ -0,0 +1,17 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo_solib
+LOCAL_SRC_FILES := foo.cpp
+include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo_exe
+LOCAL_SRC_FILES := foo.cpp
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo_static_exe
+LOCAL_SRC_FILES := foo.cpp
+LOCAL_LDFLAGS := -static
+include $(BUILD_EXECUTABLE)
diff --git a/tests/build/warn-execstack/jni/Application.mk b/tests/build/warn-execstack/jni/Application.mk
new file mode 100644
index 0000000..93d09f8
--- /dev/null
+++ b/tests/build/warn-execstack/jni/Application.mk
@@ -0,0 +1,11 @@
+# Verify that an NDK binary can be linked with --warn-execstack without a
+# warning. Regression test for https://github.com/android-ndk/ndk/issues/779.
+#
+# ndk-build implicitly passes -Wl,--fatal-warnings, so a warning fails the
+# test.
+#
+# Gold is only linker to implement --warn-execstack. ld.bfd doesn't recognize
+# --warn-execstack, and ld.lld *does* recognize the flag, but quietly discards
+# it.
+
+APP_LDFLAGS := -fuse-ld=gold -Wl,--warn-execstack
diff --git a/tests/build/warn-execstack/jni/foo.cpp b/tests/build/warn-execstack/jni/foo.cpp
new file mode 100644
index 0000000..4cce7f6
--- /dev/null
+++ b/tests/build/warn-execstack/jni/foo.cpp
@@ -0,0 +1,3 @@
+int main() {
+  return 0;
+}
diff --git a/tests/build/wchar_t-size/jni/Android.mk b/tests/build/wchar_t-size/jni/Android.mk
new file mode 100644
index 0000000..c6e4266
--- /dev/null
+++ b/tests/build/wchar_t-size/jni/Android.mk
@@ -0,0 +1,9 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := test_wchar_t_size
+LOCAL_SRC_FILES := \
+  test_default.c \
+  test_always_signed.c \
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/tests/build/wchar_t-size/jni/Application.mk b/tests/build/wchar_t-size/jni/Application.mk
new file mode 100644
index 0000000..f51c740
--- /dev/null
+++ b/tests/build/wchar_t-size/jni/Application.mk
@@ -0,0 +1,2 @@
+APP_ABI := all
+APP_PLATFORM := android-3
diff --git a/tests/build/wchar_t-size/jni/test_always_signed.c b/tests/build/wchar_t-size/jni/test_always_signed.c
new file mode 100644
index 0000000..9454ac6
--- /dev/null
+++ b/tests/build/wchar_t-size/jni/test_always_signed.c
@@ -0,0 +1,22 @@
+#include <android/api-level.h>
+
+#if !__LP64__ && !defined(__arm__) || __ANDROID_API__ < 9
+
+// This checks that, by default, wchar_t is 32-bit and
+// WCHAR_MIN/WCHAR_MAX are 32-bit signed on all platforms when
+// _WCHAR_IS_ALWAYS_SIGNED is defined.
+#define _WCHAR_IS_ALWAYS_SIGNED 1
+#include <wchar.h>
+
+#define CONCAT(x,y) CONCAT_(x,y)
+#define CONCAT_(x,y) x ## y
+
+#define STATIC_ASSERT(condition) \
+  static char CONCAT(dummy_,__LINE__)[1 - 2*(!(condition))];
+
+STATIC_ASSERT(sizeof(wchar_t) == 4);
+
+STATIC_ASSERT(WCHAR_MIN == -1-2147483647);
+STATIC_ASSERT(WCHAR_MAX == 2147483647);
+
+#endif
\ No newline at end of file
diff --git a/tests/build/wchar_t-size/jni/test_default.c b/tests/build/wchar_t-size/jni/test_default.c
new file mode 100644
index 0000000..170b28a
--- /dev/null
+++ b/tests/build/wchar_t-size/jni/test_default.c
@@ -0,0 +1,25 @@
+#include <android/api-level.h>
+
+#if !__LP64__ && !defined(__arm__) || __ANDROID_API__ == 3
+
+// This checks that, by default, wchar_t is 32-bit and
+// WCHAR_MIN/WCHAR_MAX are 32-bit signed on all platforms except ARM.
+#include <wchar.h>
+
+#define CONCAT(x,y) CONCAT_(x,y)
+#define CONCAT_(x,y) x ## y
+
+#define STATIC_ASSERT(condition) \
+  static char CONCAT(dummy_,__LINE__)[1 - 2*(!(condition))];
+
+STATIC_ASSERT(sizeof(wchar_t) == 4);
+
+#if defined(__arm__)
+STATIC_ASSERT(WCHAR_MIN == 0U);
+STATIC_ASSERT(WCHAR_MAX == 2*2147483647U + 1U);
+#else
+STATIC_ASSERT(WCHAR_MIN == -1-2147483647);
+STATIC_ASSERT(WCHAR_MAX == 2147483647);
+#endif
+
+#endif
\ No newline at end of file
diff --git a/tests/build/weak_symbols_off_by_default/CMakeLists.txt b/tests/build/weak_symbols_off_by_default/CMakeLists.txt
deleted file mode 100644
index 1efabbf..0000000
--- a/tests/build/weak_symbols_off_by_default/CMakeLists.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(WeakSymbolsBuildSupport CXX)
-add_executable(weak_symbols jni/weak_symbols.cpp)
\ No newline at end of file
diff --git a/tests/build/weak_symbols_off_by_default/jni/Android.mk b/tests/build/weak_symbols_off_by_default/jni/Android.mk
deleted file mode 100644
index 3eb59f0..0000000
--- a/tests/build/weak_symbols_off_by_default/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := weak_symbols
-LOCAL_SRC_FILES := weak_symbols.cpp
-LOCAL_LDLIBS := -landroid
-include $(BUILD_EXECUTABLE)
\ No newline at end of file
diff --git a/tests/build/weak_symbols_off_by_default/jni/Application.mk b/tests/build/weak_symbols_off_by_default/jni/Application.mk
deleted file mode 100644
index 067c76f..0000000
--- a/tests/build/weak_symbols_off_by_default/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_STL := c++_static
\ No newline at end of file
diff --git a/tests/build/weak_symbols_off_by_default/jni/weak_symbols.cpp b/tests/build/weak_symbols_off_by_default/jni/weak_symbols.cpp
deleted file mode 100644
index 80815ad..0000000
--- a/tests/build/weak_symbols_off_by_default/jni/weak_symbols.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <android/versioning.h>
-
-// Create an unavailable symbol that's set to an availability version
-// higher than any ABI's minimum SDK version.
-extern "C" void AFoo() __INTRODUCED_IN(100);
-
-int main(int, char**) {
-  AFoo();
-  return 0;
-}
diff --git a/tests/build/weak_symbols_off_by_default/test_config.py b/tests/build/weak_symbols_off_by_default/test_config.py
deleted file mode 100644
index 9d9b554..0000000
--- a/tests/build/weak_symbols_off_by_default/test_config.py
+++ /dev/null
@@ -1,2 +0,0 @@
-def is_negative_test() -> bool:
-    return True
diff --git a/tests/build/weak_symbols_unguarded_availability/CMakeLists.txt b/tests/build/weak_symbols_unguarded_availability/CMakeLists.txt
deleted file mode 100644
index 825ce5f..0000000
--- a/tests/build/weak_symbols_unguarded_availability/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(WeakSymbolsBuildSupport CXX)
-add_executable(weak_symbols jni/weak_symbols.cpp)
-target_link_libraries(weak_symbols PRIVATE -landroid)
\ No newline at end of file
diff --git a/tests/build/weak_symbols_unguarded_availability/jni/Android.mk b/tests/build/weak_symbols_unguarded_availability/jni/Android.mk
deleted file mode 100644
index 3421812..0000000
--- a/tests/build/weak_symbols_unguarded_availability/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := weak_symbols
-LOCAL_SRC_FILES := weak_symbols.cpp
-LOCAL_LDLIBS := -landroid
-include $(BUILD_EXECUTABLE)
diff --git a/tests/build/weak_symbols_unguarded_availability/jni/Application.mk b/tests/build/weak_symbols_unguarded_availability/jni/Application.mk
deleted file mode 100644
index 44e2196..0000000
--- a/tests/build/weak_symbols_unguarded_availability/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_STL := c++_static
-APP_WEAK_API_DEFS := true
\ No newline at end of file
diff --git a/tests/build/weak_symbols_unguarded_availability/jni/weak_symbols.cpp b/tests/build/weak_symbols_unguarded_availability/jni/weak_symbols.cpp
deleted file mode 100644
index 80815ad..0000000
--- a/tests/build/weak_symbols_unguarded_availability/jni/weak_symbols.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-#include <android/versioning.h>
-
-// Create an unavailable symbol that's set to an availability version
-// higher than any ABI's minimum SDK version.
-extern "C" void AFoo() __INTRODUCED_IN(100);
-
-int main(int, char**) {
-  AFoo();
-  return 0;
-}
diff --git a/tests/build/weak_symbols_unguarded_availability/test_config.py b/tests/build/weak_symbols_unguarded_availability/test_config.py
deleted file mode 100644
index ad19b52..0000000
--- a/tests/build/weak_symbols_unguarded_availability/test_config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_WEAK_API_DEFS=ON"]
-
-
-def is_negative_test() -> bool:
-    return True
diff --git a/tests/build/wrap_sh/project/jni/Application.mk b/tests/build/wrap_sh/project/jni/Application.mk
index 470969c..5622b10 100644
--- a/tests/build/wrap_sh/project/jni/Application.mk
+++ b/tests/build/wrap_sh/project/jni/Application.mk
@@ -1,5 +1,4 @@
 APP_WRAP_SH_armeabi-v7a := armeabi-v7a.sh
 APP_WRAP_SH_arm64-v8a := arm64-v8a.sh
-APP_WRAP_SH_riscv64 := riscv64.sh
 APP_WRAP_SH_x86:= x86.sh
 APP_WRAP_SH_x86_64:= x86_64.sh
diff --git a/tests/build/wrap_sh/project/jni/riscv64.sh b/tests/build/wrap_sh/project/jni/riscv64.sh
deleted file mode 100644
index 52f1642..0000000
--- a/tests/build/wrap_sh/project/jni/riscv64.sh
+++ /dev/null
@@ -1 +0,0 @@
-riscv64
diff --git a/tests/build/wrap_sh/test.py b/tests/build/wrap_sh/test.py
index 5ea1057..10babab 100644
--- a/tests/build/wrap_sh/test.py
+++ b/tests/build/wrap_sh/test.py
@@ -13,48 +13,42 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Check for correct wrap.sh from ndk-build.
+"""Check for correct link order from ndk-build.
 """
 import os
 import subprocess
 import sys
 import textwrap
-from pathlib import Path
-
-from ndk.test.spec import BuildConfiguration
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path, abi, platform, linker, build_flags):
     """Checks that the proper wrap.sh scripts were installed."""
-    ndk_build = os.path.join(ndk_path, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build += ".cmd"
-    project_path = Path("project")
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
     ]
-    proc = subprocess.Popen(
-        [ndk_build, "-C", str(project_path)] + ndk_args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     out, _ = proc.communicate()
+    out = out.decode('utf-8')
     if proc.returncode != 0:
         return proc.returncode == 0, out
 
-    wrap_sh = project_path / "libs" / config.abi / "wrap.sh"
-    if not wrap_sh.exists():
-        return False, f"{wrap_sh} does not exist"
+    wrap_sh = os.path.join(project_path, 'libs', abi, 'wrap.sh')
+    if not os.path.exists(wrap_sh):
+        return False, '{} does not exist'.format(wrap_sh)
 
-    contents = wrap_sh.read_text(encoding="utf-8").strip()
-    if contents != config.abi:
-        return False, textwrap.dedent(
-            f"""\
+    with open(wrap_sh) as wrap_sh_file:
+        contents = wrap_sh_file.read().strip()
+    if contents != abi:
+        return False, textwrap.dedent("""\
             wrap.sh file had wrong contents:
-            Expected: {config.abi}
-            Actual: {contents}"""
-        )
+            Expected: {}
+            Actual: {}""".format(abi, contents))
 
-    return True, ""
+    return True, ''
diff --git a/tests/build/wrap_sh_generic/test.py b/tests/build/wrap_sh_generic/test.py
index 27058ae..9151e1f 100644
--- a/tests/build/wrap_sh_generic/test.py
+++ b/tests/build/wrap_sh_generic/test.py
@@ -19,42 +19,36 @@
 import subprocess
 import sys
 import textwrap
-from pathlib import Path
-
-from ndk.test.spec import BuildConfiguration
 
 
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path, abi, platform, linker, build_flags):
     """Checks that the proper wrap.sh scripts were installed."""
-    ndk_build = os.path.join(ndk_path, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build += ".cmd"
-    project_path = Path("project")
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
     ]
-    proc = subprocess.Popen(
-        [ndk_build, "-C", str(project_path)] + ndk_args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     out, _ = proc.communicate()
+    out = out.decode('utf-8')
     if proc.returncode != 0:
         return proc.returncode == 0, out
 
-    wrap_sh = project_path / "libs" / config.abi / "wrap.sh"
-    if not wrap_sh.exists():
-        return False, "{} does not exist".format(wrap_sh)
+    wrap_sh = os.path.join(project_path, 'libs', abi, 'wrap.sh')
+    if not os.path.exists(wrap_sh):
+        return False, '{} does not exist'.format(wrap_sh)
 
-    contents = wrap_sh.read_text(encoding="utf-8").strip()
-    if contents != "generic":
-        return False, textwrap.dedent(
-            f"""\
-            {config.abi} wrap.sh file had wrong contents:
+    with open(wrap_sh) as wrap_sh_file:
+        contents = wrap_sh_file.read().strip()
+    if contents != 'generic':
+        return False, textwrap.dedent("""\
+            wrap.sh file had wrong contents:
             Expected: generic
-            Actual: {contents}"""
-        )
+            Actual: {}""".format(abi, contents))
 
-    return True, ""
+    return True, ''
diff --git a/tests/build/wrap_sh_none/test.py b/tests/build/wrap_sh_none/test.py
index e688333..c6ea862 100644
--- a/tests/build/wrap_sh_none/test.py
+++ b/tests/build/wrap_sh_none/test.py
@@ -13,35 +13,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""Check for no wrap.sh from ndk-build."""
+"""Check for correct link order from ndk-build.
+"""
 import os
 import subprocess
 import sys
 
-from ndk.test.spec import BuildConfiguration
 
-
-def run_test(ndk_path: str, config: BuildConfiguration) -> tuple[bool, str]:
+def run_test(ndk_path, abi, platform, linker, build_flags):
     """Checks that the proper wrap.sh scripts were installed."""
-    ndk_build = os.path.join(ndk_path, "ndk-build")
-    if sys.platform == "win32":
-        ndk_build += ".cmd"
-    project_path = "project"
-    ndk_args = [
-        f"APP_ABI={config.abi}",
-        f"APP_PLATFORM=android-{config.api}",
+    ndk_build = os.path.join(ndk_path, 'ndk-build')
+    if sys.platform == 'win32':
+        ndk_build += '.cmd'
+    project_path = 'project'
+    ndk_args = build_flags + [
+        f'APP_ABI={abi}',
+        f'APP_LD={linker.value}',
+        f'APP_PLATFORM=android-{platform}',
     ]
-    proc = subprocess.Popen(
-        [ndk_build, "-C", project_path] + ndk_args,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.STDOUT,
-        encoding="utf-8",
-    )
+    proc = subprocess.Popen([ndk_build, '-C', project_path] + ndk_args,
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     out, _ = proc.communicate()
+    out = out.decode('utf-8')
     if proc.returncode != 0:
         return proc.returncode == 0, out
 
-    wrap_sh = os.path.join(project_path, "libs", config.abi, "wrap.sh")
+    wrap_sh = os.path.join(project_path, 'libs', abi, 'wrap.sh')
     if os.path.exists(wrap_sh):
-        return False, "{} should not exist".format(wrap_sh)
-    return True, ""
+        return False, '{} should not exist'.format(wrap_sh)
+    return True, ''
diff --git a/tests/device/android_support/jni/Android.mk b/tests/device/android_support/jni/Android.mk
new file mode 100644
index 0000000..0a937d3
--- /dev/null
+++ b/tests/device/android_support/jni/Android.mk
@@ -0,0 +1,45 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#  * Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#  * Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+# SUCH DAMAGE.
+#
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := android_support_unittests
+LOCAL_SRC_FILES := \
+    inttypes_test.cpp \
+    math_test.cpp \
+    platform_version.cpp \
+    stdlib_test.cpp \
+    wcstox_test.cpp \
+    swprintf_test.cpp \
+
+LOCAL_STATIC_LIBRARIES := android_support googletest_static googletest_main
+include $(BUILD_EXECUTABLE)
+
+$(call import-module,android/support)
+$(call import-module,third_party/googletest)
diff --git a/tests/build/static_cxx_linkable/jni/Application.mk b/tests/device/android_support/jni/Application.mk
similarity index 100%
rename from tests/build/static_cxx_linkable/jni/Application.mk
rename to tests/device/android_support/jni/Application.mk
diff --git a/tests/device/android_support/jni/fixed_in.h b/tests/device/android_support/jni/fixed_in.h
new file mode 100644
index 0000000..af66891
--- /dev/null
+++ b/tests/device/android_support/jni/fixed_in.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#pragma once
+
+#include <sys/cdefs.h>
+
+#include "platform_version.h"
+
+#define FIXED_IN(api)                                          \
+  if (__ANDROID_API__ < (api) && platform_version() < (api)) { \
+    return;                                                    \
+  }
diff --git a/tests/device/android_support/jni/inttypes_test.cpp b/tests/device/android_support/jni/inttypes_test.cpp
new file mode 100644
index 0000000..8acbbfb
--- /dev/null
+++ b/tests/device/android_support/jni/inttypes_test.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <gtest/gtest.h>
+
+#include <inttypes.h>
+
+// https://github.com/android-ndk/ndk/issues/502
+TEST(inttypes, imaxabs) {
+  ASSERT_EQ(INTMAX_MAX, imaxabs(-INTMAX_MAX));
+}
+
+// https://github.com/android-ndk/ndk/issues/502
+TEST(inttypes, imaxdiv) {
+  imaxdiv_t r = imaxdiv(-5, 3);
+  ASSERT_EQ(-1, r.quot);
+  ASSERT_EQ(-2, r.rem);
+}
diff --git a/tests/device/android_support/jni/math_test.cpp b/tests/device/android_support/jni/math_test.cpp
new file mode 100644
index 0000000..251454e
--- /dev/null
+++ b/tests/device/android_support/jni/math_test.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <gtest/gtest.h>
+
+#include <math.h>
+
+// https://github.com/android-ndk/ndk/issues/502
+TEST(math, frexp) {
+  int exp;
+  double dr = frexp(1024.0, &exp);
+  ASSERT_DOUBLE_EQ(1024.0, scalbn(dr, exp));
+}
+
+// https://github.com/android-ndk/ndk/issues/502
+TEST(math, frexpf) {
+  int exp;
+  float fr = frexpf(1024.0f, &exp);
+  ASSERT_FLOAT_EQ(1024.0f, scalbnf(fr, exp));
+}
+
+// https://github.com/android-ndk/ndk/issues/502
+TEST(math, frexpl) {
+  int exp;
+  long double ldr = frexpl(1024.0L, &exp);
+  ASSERT_DOUBLE_EQ(1024.0L, scalbnl(ldr, exp));
+}
+
+// https://github.com/android-ndk/ndk/issues/502
+TEST(math, log2f) {
+  ASSERT_FLOAT_EQ(12.0f, log2f(4096.0f));
+}
diff --git a/tests/device/android_support/jni/platform_version.cpp b/tests/device/android_support/jni/platform_version.cpp
new file mode 100644
index 0000000..f9d5222
--- /dev/null
+++ b/tests/device/android_support/jni/platform_version.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include "platform_version.h"
+
+#include <sys/system_properties.h>
+
+#include <string>
+
+int platform_version() {
+  static int platform_version_ = 0;
+  if (platform_version_ == 0) {
+    char value[PROP_VALUE_MAX];
+    if (__system_property_get("ro.build.version.sdk", value) == 0) {
+      platform_version_ = -1;
+    } else {
+      platform_version_ = std::stoi(value);
+    }
+  }
+
+  return platform_version_;
+}
diff --git a/tests/device/android_support/jni/platform_version.h b/tests/device/android_support/jni/platform_version.h
new file mode 100644
index 0000000..d52b9d2
--- /dev/null
+++ b/tests/device/android_support/jni/platform_version.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#pragma once
+
+int platform_version();
diff --git a/tests/device/android_support/jni/stdlib_test.cpp b/tests/device/android_support/jni/stdlib_test.cpp
new file mode 100644
index 0000000..09aa4d7
--- /dev/null
+++ b/tests/device/android_support/jni/stdlib_test.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <gtest/gtest.h>
+
+#include <stdlib.h>
+
+TEST(stdlib, posix_memalign_sweep) {
+  void* ptr;
+
+  // These should all fail.
+  for (size_t align = 0; align < sizeof(long); align++) {
+    ASSERT_EQ(EINVAL, posix_memalign(&ptr, align, 256))
+        << "Unexpected value at align " << align;
+  }
+
+  // Verify powers of 2 up to 2048 allocate, and verify that all other
+  // alignment values between the powers of 2 fail.
+  size_t last_align = sizeof(long);
+  for (size_t align = sizeof(long); align <= 2048; align <<= 1) {
+    // Try all of the non power of 2 values from the last until this value.
+    for (size_t fail_align = last_align + 1; fail_align < align; fail_align++) {
+      ASSERT_EQ(EINVAL, posix_memalign(&ptr, fail_align, 256))
+          << "Unexpected success at align " << fail_align;
+    }
+    ASSERT_EQ(0, posix_memalign(&ptr, align, 256))
+        << "Unexpected failure at align " << align;
+    ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) & (align - 1))
+        << "Did not return a valid aligned ptr " << ptr << " expected alignment " << align;
+    free(ptr);
+    last_align = align;
+  }
+}
+
+TEST(stdlib, posix_memalign_various_sizes) {
+  std::vector<size_t> sizes{1, 4, 8, 256, 1024, 65000, 128000, 256000, 1000000};
+  for (auto size : sizes) {
+    void* ptr;
+    ASSERT_EQ(0, posix_memalign(&ptr, 16, 1))
+        << "posix_memalign failed at size " << size;
+    ASSERT_EQ(0U, reinterpret_cast<uintptr_t>(ptr) & 0xf)
+        << "Pointer not aligned at size " << size << " ptr " << ptr;
+    free(ptr);
+  }
+}
diff --git a/tests/device/android_support/jni/swprintf_test.cpp b/tests/device/android_support/jni/swprintf_test.cpp
new file mode 100644
index 0000000..53c412d
--- /dev/null
+++ b/tests/device/android_support/jni/swprintf_test.cpp
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <errno.h>
+#include <math.h>
+#include <stdio.h>
+#include <wchar.h>
+
+#include <gtest/gtest.h>
+
+#include "fixed_in.h"
+
+TEST(stdio, swprintf) {
+  constexpr size_t nchars = 32;
+  wchar_t buf[nchars];
+
+  ASSERT_EQ(2, swprintf(buf, nchars, L"ab")) << strerror(errno);
+  ASSERT_EQ(std::wstring(L"ab"), buf);
+  ASSERT_EQ(5, swprintf(buf, nchars, L"%s", "abcde"));
+  ASSERT_EQ(std::wstring(L"abcde"), buf);
+
+  // Unlike swprintf(), swprintf() returns -1 in case of truncation
+  // and doesn't necessarily zero-terminate the output!
+  ASSERT_EQ(-1, swprintf(buf, 4, L"%s", "abcde"));
+
+  const char kString[] = "Hello, World";
+  ASSERT_EQ(12, swprintf(buf, nchars, L"%s", kString));
+  ASSERT_EQ(std::wstring(L"Hello, World"), buf);
+  ASSERT_EQ(12, swprintf(buf, 13, L"%s", kString));
+  ASSERT_EQ(std::wstring(L"Hello, World"), buf);
+}
+
+// https://github.com/android-ndk/ndk/issues/437
+TEST(stdio, swprintf_a) {
+  FIXED_IN(__ANDROID_API_L__)
+
+  constexpr size_t nchars = 32;
+  wchar_t buf[nchars];
+
+  ASSERT_EQ(20, swprintf(buf, nchars, L"%a", 3.1415926535));
+  ASSERT_EQ(std::wstring(L"0x1.921fb54411744p+1"), buf);
+}
+
+TEST(stdio, swprintf_ls) {
+  FIXED_IN(__ANDROID_API_L__)
+
+  constexpr size_t nchars = 32;
+  wchar_t buf[nchars];
+
+  static const wchar_t kWideString[] = L"Hello\uff41 World";
+  ASSERT_EQ(12, swprintf(buf, nchars, L"%ls", kWideString));
+  ASSERT_EQ(std::wstring(kWideString), buf);
+  ASSERT_EQ(12, swprintf(buf, 13, L"%ls", kWideString));
+  ASSERT_EQ(std::wstring(kWideString), buf);
+}
+
+template <typename T>
+static void CheckInfNan(int snprintf_fn(T*, size_t, const T*, ...),
+                        int sscanf_fn(const T*, const T*, ...),
+                        const T* fmt_string, const T* fmt, const T* fmt_plus,
+                        const T* minus_inf, const T* inf_, const T* plus_inf,
+                        const T* minus_nan, const T* nan_, const T* plus_nan) {
+  T buf[BUFSIZ];
+  float f;
+
+  // NaN.
+
+  snprintf_fn(buf, sizeof(buf), fmt, nanf(""));
+  EXPECT_STREQ(nan_, buf) << fmt;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_TRUE(isnan(f));
+
+  snprintf_fn(buf, sizeof(buf), fmt, -nanf(""));
+  EXPECT_STREQ(minus_nan, buf) << fmt;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_TRUE(isnan(f));
+
+  snprintf_fn(buf, sizeof(buf), fmt_plus, nanf(""));
+  EXPECT_STREQ(plus_nan, buf) << fmt_plus;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_TRUE(isnan(f));
+
+  snprintf_fn(buf, sizeof(buf), fmt_plus, -nanf(""));
+  EXPECT_STREQ(minus_nan, buf) << fmt_plus;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_TRUE(isnan(f));
+
+  // Inf.
+
+  snprintf_fn(buf, sizeof(buf), fmt, HUGE_VALF);
+  EXPECT_STREQ(inf_, buf) << fmt;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_EQ(HUGE_VALF, f);
+
+  snprintf_fn(buf, sizeof(buf), fmt, -HUGE_VALF);
+  EXPECT_STREQ(minus_inf, buf) << fmt;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_EQ(-HUGE_VALF, f);
+
+  snprintf_fn(buf, sizeof(buf), fmt_plus, HUGE_VALF);
+  EXPECT_STREQ(plus_inf, buf) << fmt_plus;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_EQ(HUGE_VALF, f);
+
+  snprintf_fn(buf, sizeof(buf), fmt_plus, -HUGE_VALF);
+  EXPECT_STREQ(minus_inf, buf) << fmt_plus;
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f));
+  EXPECT_EQ(-HUGE_VALF, f);
+
+  // Check case-insensitivity.
+  snprintf_fn(buf, sizeof(buf), fmt_string, "[InFiNiTy]");
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f)) << buf;
+  EXPECT_EQ(HUGE_VALF, f);
+  snprintf_fn(buf, sizeof(buf), fmt_string, "[NaN]");
+  EXPECT_EQ(1, sscanf_fn(buf, fmt, &f)) << buf;
+  EXPECT_TRUE(isnan(f));
+}
+
+TEST(STDIO_TEST, swprintf_swscanf_inf_nan) {
+  FIXED_IN(__ANDROID_API_O__)
+
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%a]", L"[%+a]",
+              L"[-inf]", L"[inf]", L"[+inf]",
+              L"[-nan]", L"[nan]", L"[+nan]");
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%A]", L"[%+A]",
+              L"[-INF]", L"[INF]", L"[+INF]",
+              L"[-NAN]", L"[NAN]", L"[+NAN]");
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%e]", L"[%+e]",
+              L"[-inf]", L"[inf]", L"[+inf]",
+              L"[-nan]", L"[nan]", L"[+nan]");
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%E]", L"[%+E]",
+              L"[-INF]", L"[INF]", L"[+INF]",
+              L"[-NAN]", L"[NAN]", L"[+NAN]");
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%f]", L"[%+f]",
+              L"[-inf]", L"[inf]", L"[+inf]",
+              L"[-nan]", L"[nan]", L"[+nan]");
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%F]", L"[%+F]",
+              L"[-INF]", L"[INF]", L"[+INF]",
+              L"[-NAN]", L"[NAN]", L"[+NAN]");
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%g]", L"[%+g]",
+              L"[-inf]", L"[inf]", L"[+inf]",
+              L"[-nan]", L"[nan]", L"[+nan]");
+  CheckInfNan(swprintf, swscanf, L"%s",
+              L"[%G]", L"[%+G]",
+              L"[-INF]", L"[INF]", L"[+INF]",
+              L"[-NAN]", L"[NAN]", L"[+NAN]");
+}
diff --git a/tests/device/android_support/jni/wcstox_test.cpp b/tests/device/android_support/jni/wcstox_test.cpp
new file mode 100644
index 0000000..b8dbb41
--- /dev/null
+++ b/tests/device/android_support/jni/wcstox_test.cpp
@@ -0,0 +1,245 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#include <inttypes.h>
+#include <math.h>
+#include <wchar.h>
+
+#include <limits>
+
+#include <gtest/gtest.h>
+
+#include "fixed_in.h"
+
+template <typename T>
+using WcsToIntFn = T (*)(const wchar_t*, wchar_t**, int);
+
+template <typename T>
+using WcsToFloatFn = T (*)(const wchar_t*, wchar_t**);
+
+template <typename T>
+void TestSingleWcsToFloat(WcsToFloatFn<T> fn, const wchar_t* str,
+                          T expected_value, ptrdiff_t expected_len) {
+  wchar_t* p;
+  ASSERT_EQ(expected_value, fn(str, &p));
+  ASSERT_EQ(expected_len, p - str);
+}
+
+template <typename T>
+void TestWcsToFloat(WcsToFloatFn<T> fn) {
+  TestSingleWcsToFloat(fn, L"123", static_cast<T>(123.0), 3);
+  TestSingleWcsToFloat(fn, L"123#", static_cast<T>(123.0), 3);
+  TestSingleWcsToFloat(fn, L"   123 45", static_cast<T>(123.0), 6);
+  TestSingleWcsToFloat(fn, L"9.0", static_cast<T>(9.0), 3);
+  TestSingleWcsToFloat(fn, L"-9.0", static_cast<T>(-9.0), 4);
+  TestSingleWcsToFloat(fn, L" \t\v\f\r\n9.0", static_cast<T>(9.0), 9);
+}
+
+template <typename T>
+void TestWcsToFloatHexFloats(WcsToFloatFn<T> fn) {
+  TestSingleWcsToFloat(fn, L"0.9e1", static_cast<T>(9.0L), 5);
+  TestSingleWcsToFloat(fn, L"0x1.2p3", static_cast<T>(9.0L), 7);
+  TestSingleWcsToFloat(fn, L"+1e+100", static_cast<T>(1e100L), 7);
+  TestSingleWcsToFloat(fn, L"0x10000.80", static_cast<T>(65536.50L), 10);
+}
+
+template <typename T>
+void TestWcsToFloatInfNan(WcsToFloatFn<T> fn) {
+  ASSERT_TRUE(isnan(fn(L"+nan", nullptr)));
+  ASSERT_TRUE(isnan(fn(L"nan", nullptr)));
+  ASSERT_TRUE(isnan(fn(L"-nan", nullptr)));
+
+  ASSERT_TRUE(isnan(fn(L"+nan(0xff)", nullptr)));
+  ASSERT_TRUE(isnan(fn(L"nan(0xff)", nullptr)));
+  ASSERT_TRUE(isnan(fn(L"-nan(0xff)", nullptr)));
+
+  wchar_t* p;
+  ASSERT_TRUE(isnan(fn(L"+nanny", &p)));
+  ASSERT_STREQ(L"ny", p);
+  ASSERT_TRUE(isnan(fn(L"nanny", &p)));
+  ASSERT_STREQ(L"ny", p);
+  ASSERT_TRUE(isnan(fn(L"-nanny", &p)));
+  ASSERT_STREQ(L"ny", p);
+
+  ASSERT_EQ(0, fn(L"muppet", &p));
+  ASSERT_STREQ(L"muppet", p);
+  ASSERT_EQ(0, fn(L"  muppet", &p));
+  ASSERT_STREQ(L"  muppet", p);
+
+  ASSERT_EQ(std::numeric_limits<T>::infinity(), fn(L"+inf", nullptr));
+  ASSERT_EQ(std::numeric_limits<T>::infinity(), fn(L"inf", nullptr));
+  ASSERT_EQ(-std::numeric_limits<T>::infinity(), fn(L"-inf", nullptr));
+
+  ASSERT_EQ(std::numeric_limits<T>::infinity(), fn(L"+infinity", nullptr));
+  ASSERT_EQ(std::numeric_limits<T>::infinity(), fn(L"infinity", nullptr));
+  ASSERT_EQ(-std::numeric_limits<T>::infinity(), fn(L"-infinity", nullptr));
+
+  ASSERT_EQ(std::numeric_limits<T>::infinity(), fn(L"+infinitude", &p));
+  ASSERT_STREQ(L"initude", p);
+  ASSERT_EQ(std::numeric_limits<T>::infinity(), fn(L"infinitude", &p));
+  ASSERT_STREQ(L"initude", p);
+  ASSERT_EQ(-std::numeric_limits<T>::infinity(), fn(L"-infinitude", &p));
+  ASSERT_STREQ(L"initude", p);
+
+  // Check case-insensitivity.
+  ASSERT_EQ(std::numeric_limits<T>::infinity(), fn(L"InFiNiTy", nullptr));
+  ASSERT_TRUE(isnan(fn(L"NaN", nullptr)));
+}
+
+TEST(wchar, wcstof) {
+  TestWcsToFloat(wcstof);
+}
+
+TEST(wchar, wcstof_hex_floats) {
+  FIXED_IN(__ANDROID_API_O__)
+  TestWcsToFloatHexFloats(wcstof);
+}
+
+TEST(wchar, wcstof_hex_inf_nan) {
+  FIXED_IN(__ANDROID_API_O__)
+  TestWcsToFloatInfNan(wcstof);
+}
+
+TEST(wchar, wcstod) {
+  TestWcsToFloat(wcstod);
+}
+
+TEST(wchar, wcstod_hex_floats) {
+  FIXED_IN(__ANDROID_API_O__)
+  TestWcsToFloatHexFloats(wcstod);
+}
+
+TEST(wchar, wcstod_hex_inf_nan) {
+  FIXED_IN(__ANDROID_API_O__)
+  TestWcsToFloatInfNan(wcstod);
+}
+
+TEST(wchar, wcstold) {
+  TestWcsToFloat(wcstold);
+}
+
+TEST(wchar, wcstold_hex_floats) {
+  FIXED_IN(__ANDROID_API_O__)
+  TestWcsToFloatHexFloats(wcstold);
+}
+
+TEST(wchar, wcstold_hex_inf_nan) {
+  FIXED_IN(__ANDROID_API_O__)
+  TestWcsToFloatInfNan(wcstold);
+}
+
+template <typename T>
+void TestSingleWcsToInt(WcsToIntFn<T> fn, const wchar_t* str, int base,
+                        T expected_value, ptrdiff_t expected_len) {
+  wchar_t* p;
+  ASSERT_EQ(expected_value, fn(str, &p, base));
+  ASSERT_EQ(expected_len, p - str) << str;
+}
+
+template <typename T>
+void TestWcsToInt(WcsToIntFn<T> fn) {
+  TestSingleWcsToInt(fn, L"123", 10, static_cast<T>(123), 3);
+  TestSingleWcsToInt(fn, L"123", 0, static_cast<T>(123), 3);
+  TestSingleWcsToInt(fn, L"123#", 10, static_cast<T>(123), 3);
+  TestSingleWcsToInt(fn, L"01000", 8, static_cast<T>(512), 5);
+  TestSingleWcsToInt(fn, L"01000", 0, static_cast<T>(512), 5);
+  TestSingleWcsToInt(fn, L"   123 45", 0, static_cast<T>(123), 6);
+  TestSingleWcsToInt(fn, L"  -123", 0, static_cast<T>(-123), 6);
+  TestSingleWcsToInt(fn, L"0x10000", 0, static_cast<T>(65536), 7);
+}
+
+template <typename T>
+void TestWcsToIntLimits(WcsToIntFn<T> fn, const wchar_t* min_str,
+                        const wchar_t* max_str) {
+  if (std::is_signed<T>::value) {
+    ASSERT_EQ(std::numeric_limits<T>::min(), fn(min_str, nullptr, 0)) << min_str;
+  } else {
+    // If the subject sequence begins with a <hyphen-minus>, the value resulting
+    // from the conversion shall be negated.
+    // http://pubs.opengroup.org/onlinepubs/9699919799/functions/strtoul.html
+    ASSERT_EQ(std::numeric_limits<T>::max(), fn(min_str, nullptr, 0)) << min_str;
+  }
+  ASSERT_EQ(std::numeric_limits<T>::max(), fn(max_str, nullptr, 0)) << max_str;
+}
+
+TEST(wchar, wcstol) {
+  TestWcsToInt(wcstol);
+}
+
+TEST(wchar, wcstol_limits) {
+  if (sizeof(long) == 8) {
+    TestWcsToIntLimits(wcstol, L"-9223372036854775809", L"9223372036854775808");
+  } else {
+    TestWcsToIntLimits(wcstol, L"-2147483649", L"2147483648");
+  }
+}
+
+TEST(wchar, wcstoul) {
+  TestWcsToInt(wcstoul);
+}
+
+TEST(wchar, wcstoul_limits) {
+  if (sizeof(long) == 8) {
+    TestWcsToIntLimits(wcstoul, L"-1", L"18446744073709551616");
+  } else {
+    TestWcsToIntLimits(wcstoul, L"-1", L"4294967296");
+  }
+}
+
+TEST(wchar, wcstoll) {
+  TestWcsToInt(wcstoll);
+}
+
+TEST(wchar, wcstoll_limits) {
+  TestWcsToIntLimits(wcstoll, L"-9223372036854775809", L"9223372036854775808");
+}
+
+TEST(wchar, wcstoull) {
+  TestWcsToInt(wcstoull);
+}
+
+TEST(wchar, wcstoull_limits) {
+  TestWcsToIntLimits(wcstoull, L"-1", L"18446744073709551616");
+}
+
+TEST(wchar, wcstoimax) {
+  TestWcsToInt(wcstoimax);
+}
+
+TEST(wchar, wcstoimax_limits) {
+  TestWcsToIntLimits(wcstoimax, L"-9223372036854775809",
+                     L"9223372036854775808");
+}
+
+TEST(wchar, wcstoumax) {
+  TestWcsToInt(wcstoumax);
+}
+
+TEST(wchar, wcstoumax_limits) {
+  TestWcsToIntLimits(wcstoumax, L"-1", L"18446744073709551616");
+}
diff --git a/tests/device/android_support/test_config.py b/tests/device/android_support/test_config.py
new file mode 100644
index 0000000..2c7511a
--- /dev/null
+++ b/tests/device/android_support/test_config.py
@@ -0,0 +1,4 @@
+def build_unsupported(test):
+    if test.config.api >= 21:
+        return f'android-{test.config.api}'
+    return None
diff --git a/tests/device/asan-smoke/jni/asan_oob_test.cc b/tests/device/asan-smoke/jni/asan_oob_test.cc
index 18db3e7..6126f3a 100644
--- a/tests/device/asan-smoke/jni/asan_oob_test.cc
+++ b/tests/device/asan-smoke/jni/asan_oob_test.cc
@@ -6,20 +6,14 @@
 // License. See LICENSE.TXT for details.
 //
 //===----------------------------------------------------------------------===//
-#include <stdint.h>
+//
+// This file is a part of AddressSanitizer, an address sanity checker.
+//
+//===----------------------------------------------------------------------===//
 #include <stdio.h>
-#include <stdlib.h>
-
-#include <string>
 
 #include <gtest/gtest.h>
 
-#if __LP64__ || defined(_WIN64)
-#  define SANITIZER_WORDSIZE 64
-#else
-#  define SANITIZER_WORDSIZE 32
-#endif
-
 #define NOINLINE __attribute__((noinline))
 
 typedef uint8_t   U1;
@@ -36,16 +30,6 @@
 #endif
 }
 
-// This function returns its parameter but in such a way that compiler
-// can not prove it.
-template<class T>
-NOINLINE
-static T Ident(T t) {
-  T ret = t;
-  break_optimization(&ret);
-  return ret;
-}
-
 NOINLINE void *malloc_fff(size_t size) {
   void *res = malloc/**/(size); break_optimization(0); return res;}
 NOINLINE void *malloc_eee(size_t size) {
@@ -68,14 +52,6 @@
   *a = 0;
 }
 
-NOINLINE void asan_write_sized_aligned(uint8_t *p, size_t size) {
-  EXPECT_EQ(0U, ((uintptr_t)p % size));
-  if      (size == 1) asan_write((uint8_t*)p);
-  else if (size == 2) asan_write((uint16_t*)p);
-  else if (size == 4) asan_write((uint32_t*)p);
-  else if (size == 8) asan_write((uint64_t*)p);
-}
-
 template<typename T>
 NOINLINE void oob_test(int size, int off) {
   char *p = (char*)malloc_aaa(size);
@@ -85,53 +61,37 @@
   free_aaa(p);
 }
 
-static std::string GetLeftOOBMessage(int off) {
-  char str[100];
-  sprintf(str, "is located.*%d byte.*before", off);
-  return str;
-}
-
-static std::string GetRightOOBMessage(int off) {
-  char str[100];
-#if !defined(_WIN32)
-  // FIXME: Fix PR42868 and remove SEGV match.
-  sprintf(str, "is located.*%d byte.*after|SEGV", off);
-#else
-  // `|` doesn't work in googletest's regexes on Windows,
-  // see googletest/docs/advanced.md#regular-expression-syntax
-  // But it's not needed on Windows anyways.
-  sprintf(str, "is located.*%d byte.*after", off);
-#endif
-  return str;
-}
-
 template<typename T>
 void OOBTest() {
+  char expected_str[100];
   for (int size = sizeof(T); size < 20; size += 5) {
-    for (int i = -5; i < 0; i++)
-      EXPECT_DEATH(oob_test<T>(size, i), GetLeftOOBMessage(-i));
+    for (int i = -5; i < 0; i++) {
+      const char *str =
+          "is located.*%d byte.*to the left";
+      sprintf(expected_str, str, abs(i));
+      EXPECT_DEATH(oob_test<T>(size, i), expected_str);
+    }
 
     for (int i = 0; i < (int)(size - sizeof(T) + 1); i++)
       oob_test<T>(size, i);
 
     for (int i = size - sizeof(T) + 1; i <= (int)(size + 2 * sizeof(T)); i++) {
+      const char *str =
+          "is located.*%d byte.*to the right";
+      int off = i >= size ? (i - size) : 0;
       // we don't catch unaligned partially OOB accesses.
       if (i % sizeof(T)) continue;
-      int off = i >= size ? (i - size) : 0;
-      EXPECT_DEATH(oob_test<T>(size, i), GetRightOOBMessage(off));
+      sprintf(expected_str, str, off);
+      EXPECT_DEATH(oob_test<T>(size, i), expected_str);
     }
   }
 
-  EXPECT_DEATH(oob_test<T>(kLargeMalloc, -1), GetLeftOOBMessage(1));
-  EXPECT_DEATH(oob_test<T>(kLargeMalloc, kLargeMalloc), GetRightOOBMessage(0));
+  EXPECT_DEATH(oob_test<T>(kLargeMalloc, -1),
+          "is located.*1 byte.*to the left");
+  EXPECT_DEATH(oob_test<T>(kLargeMalloc, kLargeMalloc),
+          "is located.*0 byte.*to the right");
 }
 
-// TODO(glider): the following tests are EXTREMELY slow on Darwin:
-//   AddressSanitizer.OOB_char (125503 ms)
-//   AddressSanitizer.OOB_int (126890 ms)
-//   AddressSanitizer.OOBRightTest (315605 ms)
-//   AddressSanitizer.SimpleStackTest (366559 ms)
-
 TEST(AddressSanitizer, OOB_char) {
   OOBTest<U1>();
 }
@@ -139,52 +99,3 @@
 TEST(AddressSanitizer, OOB_int) {
   OOBTest<U4>();
 }
-
-TEST(AddressSanitizer, OOBRightTest) {
-  size_t max_access_size = SANITIZER_WORDSIZE == 64 ? 8 : 4;
-  for (size_t access_size = 1; access_size <= max_access_size;
-       access_size *= 2) {
-    for (size_t alloc_size = 1; alloc_size <= 8; alloc_size++) {
-      for (size_t offset = 0; offset <= 8; offset += access_size) {
-        void *p = malloc(alloc_size);
-        // allocated: [p, p + alloc_size)
-        // accessed:  [p + offset, p + offset + access_size)
-        uint8_t *addr = (uint8_t*)p + offset;
-        if (offset + access_size <= alloc_size) {
-          asan_write_sized_aligned(addr, access_size);
-        } else {
-          int outside_bytes = offset > alloc_size ? (offset - alloc_size) : 0;
-          EXPECT_DEATH(asan_write_sized_aligned(addr, access_size),
-                       GetRightOOBMessage(outside_bytes));
-        }
-        free(p);
-      }
-    }
-  }
-}
-
-TEST(AddressSanitizer, LargeOOBRightTest) {
-  size_t large_power_of_two = 1 << 19;
-  for (size_t i = 16; i <= 256; i *= 2) {
-    size_t size = large_power_of_two - i;
-    char *p = Ident(new char[size]);
-    EXPECT_DEATH(p[size] = 0, GetRightOOBMessage(0));
-    delete [] p;
-  }
-}
-
-TEST(AddressSanitizer, DISABLED_DemoOOBLeftLow) {
-  oob_test<U1>(10, -1);
-}
-
-TEST(AddressSanitizer, DISABLED_DemoOOBLeftHigh) {
-  oob_test<U1>(kLargeMalloc, -1);
-}
-
-TEST(AddressSanitizer, DISABLED_DemoOOBRightLow) {
-  oob_test<U1>(10, 10);
-}
-
-TEST(AddressSanitizer, DISABLED_DemoOOBRightHigh) {
-  oob_test<U1>(kLargeMalloc, kLargeMalloc);
-}
diff --git a/tests/device/asan-smoke/test_config.py b/tests/device/asan-smoke/test_config.py
index 5193a2d..cd834c5 100644
--- a/tests/device/asan-smoke/test_config.py
+++ b/tests/device/asan-smoke/test_config.py
@@ -1,23 +1,8 @@
-from optparse import Option
-from typing import Optional
-
-from ndk.test.devices import Device
-from ndk.test.devicetest.case import TestCase
-
-
-def run_unsupported(test: TestCase, device: Device) -> Optional[str]:
+def run_unsupported(test, device):
     if device.version < 19:
-        return f"{device.version}"
-    if device.version >= 28 and test.config.abi == "x86_64":
+        return device.version
+    if device.version >= 28 and test.config.abi == 'x86_64':
         # ASAN is flaky with 28 x86_64. It still works with 32-bit or with
         # older platforms.
-        return "ASAN is flaky on 28 x86_64 (http://b/37130178)"
-    if test.config.abi == "riscv64":
-        return "ASAN is unsupported on riscv64"
+        return 'ASAN is flaky on 28 x86_64 (http://b/37130178)'
     return None
-
-
-def run_broken(test: TestCase, device: Device) -> tuple[Optional[str], Optional[str]]:
-    if device.version == 21:
-        return f"{device.version}", "https://github.com/android/ndk/issues/1753"
-    return None, None
diff --git a/tests/device/b8708181-Vector4/jni/Application.mk b/tests/device/b8708181-Vector4/jni/Application.mk
index d7f8a09..2a7dcb3 100644
--- a/tests/device/b8708181-Vector4/jni/Application.mk
+++ b/tests/device/b8708181-Vector4/jni/Application.mk
@@ -1 +1 @@
-APP_ABI := armeabi-v7a x86 arm64-v8a x86_64
+APP_ABI := armeabi-v7a x86 mips arm64-v8a x86_64 mips64
diff --git a/tests/device/b8708181-Vector4/jni/Vector4.cpp b/tests/device/b8708181-Vector4/jni/Vector4.cpp
index fef7bde..04e1597 100644
--- a/tests/device/b8708181-Vector4/jni/Vector4.cpp
+++ b/tests/device/b8708181-Vector4/jni/Vector4.cpp
@@ -19,6 +19,9 @@
 #include <xmmintrin.h>
 #define SP  "esp"
 typedef __m128 float32x4_t;
+#elif defined(__mips__)  // mipsel64- defines __mips__ too
+#define SP  "sp"
+typedef float float32x4_t __attribute__ ((__vector_size__ (16)));
 #else
 #error unknown arch for type float32x4_t
 #endif
diff --git a/tests/device/b8708181-Vector4/test_config.py b/tests/device/b8708181-Vector4/test_config.py
deleted file mode 100644
index 1f99a58..0000000
--- a/tests/device/b8708181-Vector4/test_config.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from typing import Optional
-
-from ndk.test.devicetest.case import TestCase
-
-
-def build_unsupported(test: TestCase) -> Optional[str]:
-    # Validate if vector types allocate the proper amount of alignment on
-    # architectures that support such instructions, when returning large
-    # composite types.
-    #
-    # Some architectures, like 'riscv64' may be excluded if they employ
-    # sizeless types. In this case, the vector types are incomplete and
-    # cannot be members of unions, classes or structures and must have
-    # automatic storage duration. As this particular test requires returning
-    # a large composite type and we cannot compose types with other sizeless
-    # types, this test can be skipped for the architecture.
-    if test.config.abi not in ("armeabi-v7a", "x86", "arm64-v8a", "x86_64"):
-        return test.config.abi
-
-    return None
diff --git a/tests/device/clone/jni/Android.mk b/tests/device/clone/jni/Android.mk
new file mode 100644
index 0000000..3fc68eb
--- /dev/null
+++ b/tests/device/clone/jni/Android.mk
@@ -0,0 +1,12 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := clone
+LOCAL_SRC_FILES := clone.c
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := clone-static
+LOCAL_SRC_FILES := clone.c
+LOCAL_LDFLAGS += -static
+include $(BUILD_EXECUTABLE)
diff --git a/tests/device/clone/jni/Application.mk b/tests/device/clone/jni/Application.mk
new file mode 100644
index 0000000..b448d58
--- /dev/null
+++ b/tests/device/clone/jni/Application.mk
@@ -0,0 +1,2 @@
+APP_ABI := all
+APP_PLATFORM := android-9
diff --git a/tests/device/clone/jni/clone.c b/tests/device/clone/jni/clone.c
new file mode 100644
index 0000000..266be58
--- /dev/null
+++ b/tests/device/clone/jni/clone.c
@@ -0,0 +1,44 @@
+#include <stdio.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <linux/sched.h>
+#include <stdlib.h>
+
+int v, fd;
+
+int child_proc()
+{
+    v = 42;
+    close(fd);
+    exit(0);
+}
+
+#define STACK_SIZE 1024
+
+int main(int argc, char *argv[])
+{
+    void **child_stack;
+    char ch;
+
+    v = 9;
+    fd = open(argv[0], O_RDONLY);
+    if (read(fd, &ch, 1) < 1) {
+        printf("Can't read file");
+        exit(1);
+    }
+    child_stack = (void **) malloc(STACK_SIZE * sizeof(void *));
+    printf("v = %d\n", v);
+
+    clone(child_proc, child_stack + STACK_SIZE, CLONE_VM|CLONE_FILES, NULL);
+    sleep(1);
+
+    printf("v = %d\n", v);
+    if (read(fd, &ch, 1) < 1) {
+        printf("Can't read file because it's closed by child.\n");
+        return 0;
+    } else {
+        printf("We shouldn't be able to read from file which is closed by child.\n");
+        return 0;
+    }
+}
+
diff --git a/tests/device/clone/test_config.py b/tests/device/clone/test_config.py
new file mode 100644
index 0000000..64eecf8
--- /dev/null
+++ b/tests/device/clone/test_config.py
@@ -0,0 +1,4 @@
+def build_unsupported(test):
+    if test.config.abi == 'x86' and test.config.api < 17:
+        return test.config.abi
+    return None
diff --git a/tests/device/cmake_gtest/CMakeLists.txt b/tests/device/cmake_gtest/CMakeLists.txt
deleted file mode 100644
index ebc4027..0000000
--- a/tests/device/cmake_gtest/CMakeLists.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(CMakeGTest CXX)
-
-set(GTEST_PATH "${ANDROID_NDK}/sources/third_party/googletest")
-
-add_executable(foo
-    foo.cpp
-    ${GTEST_PATH}/src/gtest-all.cc
-    ${GTEST_PATH}/src/gtest_main.cc
-)
-target_include_directories(foo PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
\ No newline at end of file
diff --git a/tests/device/cmake_gtest/foo.cpp b/tests/device/cmake_gtest/foo.cpp
deleted file mode 100644
index 56cd3c4..0000000
--- a/tests/device/cmake_gtest/foo.cpp
+++ /dev/null
@@ -1,5 +0,0 @@
-#include "gtest/gtest.h"
-
-TEST(add, add) {
-  ASSERT_EQ(4, 2 + 2);
-}
\ No newline at end of file
diff --git a/tests/device/emm/test_config.py b/tests/device/emm/test_config.py
index 9f4f6a6..5ac2d9c 100644
--- a/tests/device/emm/test_config.py
+++ b/tests/device/emm/test_config.py
@@ -1,5 +1,5 @@
 def build_unsupported(test):
-    if test.config.abi != "x86":
+    if test.config.abi != 'x86':
         return test.config.abi
 
     return None
diff --git a/tests/device/emutls-dealloc/jni/pthread_test.cpp b/tests/device/emutls-dealloc/jni/pthread_test.cpp
index ce04276..4961b5e 100644
--- a/tests/device/emutls-dealloc/jni/pthread_test.cpp
+++ b/tests/device/emutls-dealloc/jni/pthread_test.cpp
@@ -9,7 +9,7 @@
 
 #include <gtest/gtest.h>
 
-thread_local int foo;
+thread_local int dummy;
 thread_local char tls_var[1024 * 1024];
 
 int dtor_count = 0;
@@ -36,7 +36,7 @@
 
 TEST(emutls, pthread_test) {
   // Ensure that emutls (with its pthread key) is initialized.
-  foo = 1;
+  dummy = 1;
 
   // Create another pthread key to call test_dtor.
   pthread_key_t key;
diff --git a/tests/device/emutls-dealloc/test_config.py b/tests/device/emutls-dealloc/test_config.py
deleted file mode 100644
index 1059c13..0000000
--- a/tests/device/emutls-dealloc/test_config.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from typing import Optional
-
-from ndk.abis import Abi
-from ndk.test.devices import Device
-from ndk.test.devicetest.case import TestCase
-
-
-def run_broken(test: TestCase, device: Device) -> tuple[Optional[str], Optional[str]]:
-    if device.version == 21 and test.config.abi == Abi("armeabi-v7a"):
-        return f"{device.version}", "https://github.com/android/ndk/issues/1753"
-    return None, None
diff --git a/tests/device/emutls-key-deletion/jni/dlclose_main.cpp b/tests/device/emutls-key-deletion/jni/dlclose_main.cpp
index 1256a84..8cc87d4 100644
--- a/tests/device/emutls-key-deletion/jni/dlclose_main.cpp
+++ b/tests/device/emutls-key-deletion/jni/dlclose_main.cpp
@@ -12,10 +12,6 @@
 int main() {
   std::thread([] {
     void* solib = dlopen("libndktest.so", RTLD_NOW);
-    if (!solib) {
-      fprintf(stderr, "can't find libndktest.so (%s)\n", dlerror());
-      abort();
-    }
     void (*test_func)() = (void(*)())dlsym(solib, "test_func");
     if (!test_func) {
       fprintf(stderr, "can't find test_func func (%s)\n", dlerror());
diff --git a/tests/device/fortify_runtime/jni/fortify_test.cpp b/tests/device/fortify_runtime/jni/fortify_test.cpp
index 53ded1f..73839a3 100644
--- a/tests/device/fortify_runtime/jni/fortify_test.cpp
+++ b/tests/device/fortify_runtime/jni/fortify_test.cpp
@@ -1,24 +1,11 @@
 #include <memory.h>
 
-#include <android/api-level.h>
-
 #include "gtest/gtest.h"
 #include "gtest/gtest-death-test.h"
 
-static const char* expected_stderr() {
-  if (android_get_device_api_level() <= 21) {
-    // The program is still halted and logcat includes the message on kitkat,
-    // but that message doesn't reach stderr. I'm not sure when that was fixed,
-    // so this may need revising each time we increase the lowest tested API
-    // level.
-    return "";
-  } else {
-    return "memset: prevented 5-byte write into 4-byte buffer";
-  }
-}
-
 TEST(fortify, smoke) {
   char cs[4];
   char* p = cs;
-  ASSERT_DEATH(memset(p, 0, 5), expected_stderr());
+  ASSERT_DEATH(memset(p, 0, 5),
+               "memset: prevented 5-byte write into 4-byte buffer");
 }
diff --git a/tests/device/fortify_runtime/test_config.py b/tests/device/fortify_runtime/test_config.py
new file mode 100644
index 0000000..3ffec09
--- /dev/null
+++ b/tests/device/fortify_runtime/test_config.py
@@ -0,0 +1,10 @@
+from typing import Optional
+
+from ndk.test.devices import Device
+from ndk.test.types import Test
+
+
+def run_unsupported(test: Test, device: Device) -> Optional[str]:
+    if test.config.api is not None and test.config.api < 17:
+        return f'__memset_chk not available in android-{test.config.api}'
+    return None
diff --git a/tests/device/fuzzer/test_config.py b/tests/device/fuzzer/test_config.py
index d675642..d8f6f8d 100644
--- a/tests/device/fuzzer/test_config.py
+++ b/tests/device/fuzzer/test_config.py
@@ -1,10 +1,10 @@
 from typing import Optional
 
-from ndk.test.buildtest.case import Test
-from ndk.test.devices import DeviceConfig
+from ndk.test.devices import Device
+from ndk.test.types import Test
 
 
-def run_unsupported(test: Test, _device: DeviceConfig) -> Optional[str]:
-    if test.name == "fuzzer.fuzz_test":
-        return "not a real test"
+def run_unsupported(test: Test, _device: Device) -> Optional[str]:
+    if test.name == 'fuzzer.fuzz_test':
+        return 'not a real test'
     return None
diff --git a/tests/device/gtest/test_config.py b/tests/device/gtest/test_config.py
index ec898f9..a0516fb 100644
--- a/tests/device/gtest/test_config.py
+++ b/tests/device/gtest/test_config.py
@@ -1,7 +1,14 @@
 def run_unsupported(test, device):
     # The tested behavior fails reliably on API 16, but it's flaky on 24, so
     # skip the test until 26 where it appears reliable.
-    if test.executable == "googletest-death-test-test" and device.version < 26:
-        bug = "https://github.com/android-ndk/ndk/issues/795"
-        return f"android-{device.version} ({bug})"
+    if test.executable == 'googletest-death-test-test' and device.version < 26:
+        bug = 'https://github.com/android-ndk/ndk/issues/795'
+        return f'android-{device.version} ({bug})'
     return None
+
+
+def run_broken(test, device):
+    if test.executable == 'googletest-printers-test' and device.version <= 16:
+        return (f'android-{device.version}',
+                'https://github.com/android-ndk/ndk/issues/771')
+    return None, None
diff --git a/tests/device/hwasan-smoke/CMakeLists.txt b/tests/device/hwasan-smoke/CMakeLists.txt
deleted file mode 100644
index 35e35ee..0000000
--- a/tests/device/hwasan-smoke/CMakeLists.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-cmake_minimum_required(VERSION 3.6)
-project(CMakeDefaultFlagsTest CXX)
-
-set(GTEST_PATH "${ANDROID_NDK}/sources/third_party/googletest")
-
-add_library(hwasan-smoke-cmake SHARED
-    ${GTEST_PATH}/src/gtest-all.cc
-    jni/hwasan_oob_test.cc)
-
-add_executable(hwasan-smoke-cmake_exe
-    jni/hwasan_oob_test.cc
-    ${GTEST_PATH}/src/gtest-all.cc
-    ${GTEST_PATH}/src/gtest_main.cc
-)
-
-target_include_directories(hwasan-smoke-cmake PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
-target_include_directories(hwasan-smoke-cmake_exe PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
diff --git a/tests/device/hwasan-smoke/jni/Android.mk b/tests/device/hwasan-smoke/jni/Android.mk
deleted file mode 100644
index d52c521..0000000
--- a/tests/device/hwasan-smoke/jni/Android.mk
+++ /dev/null
@@ -1,12 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := hwasan_smoke
-LOCAL_CPP_EXTENSION := .cc
-LOCAL_SRC_FILES := hwasan_oob_test.cc
-LOCAL_CFLAGS := -fsanitize=hwaddress -fno-omit-frame-pointer
-LOCAL_LDFLAGS := -fsanitize=hwaddress
-LOCAL_STATIC_LIBRARIES := googletest_main
-include $(BUILD_EXECUTABLE)
-
-$(call import-module,third_party/googletest)
diff --git a/tests/device/hwasan-smoke/jni/Application.mk b/tests/device/hwasan-smoke/jni/Application.mk
deleted file mode 100644
index a506e41..0000000
--- a/tests/device/hwasan-smoke/jni/Application.mk
+++ /dev/null
@@ -1,3 +0,0 @@
-APP_ABI := arm64-v8a
-APP_STL := c++_shared
-APP_PLATFORM := android-34
diff --git a/tests/device/hwasan-smoke/jni/hwasan_oob_test.cc b/tests/device/hwasan-smoke/jni/hwasan_oob_test.cc
deleted file mode 100644
index 69bbf5e..0000000
--- a/tests/device/hwasan-smoke/jni/hwasan_oob_test.cc
+++ /dev/null
@@ -1,23 +0,0 @@
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-#include <string>
-
-#include <gtest/gtest.h>
-
-#if !defined(__aarch64__)
-#error "HWASan is only supported on AArch64."
-#endif
-
-#if !__has_feature(hwaddress_sanitizer)
-#error "Want HWASan build"
-#endif
-
-
-TEST(HWAddressSanitizer, OOB) {
-  EXPECT_DEATH({
-      volatile char* x = const_cast<volatile char*>(reinterpret_cast<char*>(malloc(1)));
-      x[1] = '2';
-      }, ".*HWAddressSanitizer.*");
-}
diff --git a/tests/device/hwasan-smoke/test_config.py b/tests/device/hwasan-smoke/test_config.py
deleted file mode 100644
index 2908595..0000000
--- a/tests/device/hwasan-smoke/test_config.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from ndk.test.devices import DeviceConfig
-from ndk.test.devicetest.case import TestCase
-
-
-def build_unsupported(test: TestCase) -> str | None:
-    if test.config.abi != "arm64-v8a":
-        return f"{test.config.abi}"
-    return None
-
-
-def run_unsupported(test: TestCase, device: DeviceConfig) -> str | None:
-    if device.version < 34:
-        return f"{device.version}"
-    return None
-
-
-def run_broken(test: TestCase, device: DeviceConfig) -> tuple[str | None, str | None]:
-    # FIXME: support c++_shared tests for cmake and re-enable
-    # currently the c++ library is not properly pushed so the
-    # test fails to link
-    if test.build_system == "cmake":
-        return f"{test.build_system}", "https://github.com/android/ndk/issues/1942"
-    return None, None
-
-
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_SANITIZE=hwaddress", "-DANDROID_STL=c++_shared"]
diff --git a/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c b/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c
index 4ffeefb..49d35ee 100644
--- a/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c
+++ b/tests/device/issue19851-sigsetjmp/jni/issue19851-sigsetjmp.c
@@ -17,7 +17,6 @@
 #include <stdio.h>
 #include <signal.h>
 #include <setjmp.h>
-#include <unistd.h>
 
 static sigjmp_buf sbuf;
 
diff --git a/tests/device/issue61659-neon-assignment/test_config.py b/tests/device/issue61659-neon-assignment/test_config.py
index 1db330b..84e726b 100644
--- a/tests/device/issue61659-neon-assignment/test_config.py
+++ b/tests/device/issue61659-neon-assignment/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi != "armeabi-v7a":
+    if test.config.abi != 'armeabi-v7a':
         return test.config.abi
     return None
diff --git a/tests/device/weak_symbols_build_support/jni/Android.mk b/tests/device/log2/jni/Android.mk
similarity index 64%
rename from tests/device/weak_symbols_build_support/jni/Android.mk
rename to tests/device/log2/jni/Android.mk
index 7429d9a..144b10e 100644
--- a/tests/device/weak_symbols_build_support/jni/Android.mk
+++ b/tests/device/log2/jni/Android.mk
@@ -1,10 +1,9 @@
 LOCAL_PATH := $(call my-dir)
 
 include $(CLEAR_VARS)
-LOCAL_MODULE := weak_symbols
-LOCAL_SRC_FILES := weak_symbols.cpp
+LOCAL_MODULE := log2-test
+LOCAL_SRC_FILES := log2_test.cpp
 LOCAL_STATIC_LIBRARIES := googletest_main
-LOCAL_LDLIBS := -landroid
 include $(BUILD_EXECUTABLE)
 
-$(call import-module,third_party/googletest)
\ No newline at end of file
+$(call import-module,third_party/googletest)
diff --git a/tests/build/static_cxx_linkable/jni/Application.mk b/tests/device/log2/jni/Application.mk
similarity index 100%
copy from tests/build/static_cxx_linkable/jni/Application.mk
copy to tests/device/log2/jni/Application.mk
diff --git a/tests/device/log2/jni/log2_test.cpp b/tests/device/log2/jni/log2_test.cpp
new file mode 100644
index 0000000..986fd3b
--- /dev/null
+++ b/tests/device/log2/jni/log2_test.cpp
@@ -0,0 +1,14 @@
+#include <math.h>
+
+#include <gtest/gtest.h>
+
+// https://github.com/android-ndk/ndk/issues/204
+// https://android-review.googlesource.com/276095
+// libandroid_support's definition of the byte-order for doubles on armeabi was
+// wrongly big endian rather than following the byte order of the ABI. This was
+// due to a missing check for __ARM_EABI__ (a bug in the upstream FreeBSD libm
+// source that bionic did not have because it has been updated since it was
+// fixed).
+TEST(log2, log2) {
+  ASSERT_FLOAT_EQ(3.0, log2(8.0));
+}
diff --git a/tests/device/math/test_config.py b/tests/device/math/test_config.py
index 3c2c6f4..818d06a 100644
--- a/tests/device/math/test_config.py
+++ b/tests/device/math/test_config.py
@@ -4,7 +4,7 @@
     return None
 
 
-def run_unsupported(_test, device):
+def run_unsupported(test, device):
     if device.version < 18:
         return device.version
     return None
diff --git a/tests/device/memtag-smoke/CMakeLists.txt b/tests/device/memtag-smoke/CMakeLists.txt
deleted file mode 100644
index d8b1b90..0000000
--- a/tests/device/memtag-smoke/CMakeLists.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-cmake_minimum_required(VERSION 3.6)
-project(CMakeDefaultFlagsTest CXX)
-
-set(GTEST_PATH "${ANDROID_NDK}/sources/third_party/googletest")
-
-add_library(mte-smoke-cmake SHARED
-    ${GTEST_PATH}/src/gtest-all.cc
-    jni/mte_oob_test.cc)
-
-add_executable(mte-smoke-cmake_exe
-    jni/mte_oob_test.cc
-    ${GTEST_PATH}/src/gtest-all.cc
-    ${GTEST_PATH}/src/gtest_main.cc
-)
-
-target_include_directories(mte-smoke-cmake PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
-target_include_directories(mte-smoke-cmake_exe PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
diff --git a/tests/device/memtag-smoke/jni/Android.mk b/tests/device/memtag-smoke/jni/Android.mk
deleted file mode 100644
index 89d8fae..0000000
--- a/tests/device/memtag-smoke/jni/Android.mk
+++ /dev/null
@@ -1,12 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := mte_smoke
-LOCAL_CPP_EXTENSION := .cc
-LOCAL_SRC_FILES := mte_oob_test.cc
-LOCAL_CFLAGS := -fsanitize=memtag-stack -march=armv8-a+memtag -fno-omit-frame-pointer
-LOCAL_LDFLAGS := -fsanitize=memtag-stack,memtag-heap -fsanitize-memtag-mode=sync -march=armv8-a+memtag
-LOCAL_STATIC_LIBRARIES := googletest_main
-include $(BUILD_EXECUTABLE)
-
-$(call import-module,third_party/googletest)
diff --git a/tests/device/memtag-smoke/jni/Application.mk b/tests/device/memtag-smoke/jni/Application.mk
deleted file mode 100644
index a506e41..0000000
--- a/tests/device/memtag-smoke/jni/Application.mk
+++ /dev/null
@@ -1,3 +0,0 @@
-APP_ABI := arm64-v8a
-APP_STL := c++_shared
-APP_PLATFORM := android-34
diff --git a/tests/device/memtag-smoke/jni/mte_oob_test.cc b/tests/device/memtag-smoke/jni/mte_oob_test.cc
deleted file mode 100644
index a2b46dd..0000000
--- a/tests/device/memtag-smoke/jni/mte_oob_test.cc
+++ /dev/null
@@ -1,20 +0,0 @@
-#include <stdlib.h>
-
-#include <gtest/gtest.h>
-
-#if !defined(__aarch64__)
-#error "MTE is only supported on AArch64."
-#endif
-
-#if !__has_feature(memtag_stack)
-#error "Want MTE build"
-#endif
-
-
-TEST(Memtag, OOB) {
-  // Cannot assert the death message, because it doesn't get printed to stderr.
-  EXPECT_DEATH({
-      volatile char* x = const_cast<volatile char*>(reinterpret_cast<char*>(malloc(16)));
-      x[17] = '2';
-      }, "");
-}
diff --git a/tests/device/memtag-smoke/test_config.py b/tests/device/memtag-smoke/test_config.py
deleted file mode 100644
index daccef7..0000000
--- a/tests/device/memtag-smoke/test_config.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from ndk.test.devices import DeviceConfig
-from ndk.test.devicetest.case import TestCase
-
-
-def build_unsupported(test: TestCase) -> str | None:
-    if test.config.abi != "arm64-v8a":
-        return f"{test.config.abi}"
-    return None
-
-
-def run_unsupported(test: TestCase, device: DeviceConfig) -> str | None:
-    if device.version < 34:
-        return f"{device.version}"
-    if not device.supports_mte:
-        return "MTE not enabled"
-    return None
-
-
-def run_broken(test: TestCase, device: DeviceConfig) -> tuple[str | None, str | None]:
-    return None, None
-
-
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_SANITIZE=memtag"]
diff --git a/tests/device/openmp/jni/fib.c b/tests/device/openmp/jni/fib.c
index ffe316b..fb3e3e1 100644
--- a/tests/device/openmp/jni/fib.c
+++ b/tests/device/openmp/jni/fib.c
@@ -1,11 +1,8 @@
-#include <math.h>
 #include <stdio.h>
 #include <stdlib.h>
-#include <sys/time.h>
-#include <unistd.h>
-
+#include <math.h>
 #include <omp.h>
-
+#include <unistd.h>
 #define MAX 33 //41
 int Fibonacci(int n)
 {   int x, y;
@@ -34,7 +31,7 @@
   struct timeval time_start, time_end;
   int i = 0;
   // openmp related print message
-  printf("CPU_ONLN= %ld\n", sysconf(_SC_NPROCESSORS_ONLN));
+  printf("CPU_ONLN= %d\n", sysconf(_SC_NPROCESSORS_ONLN));
   printf("Number of CPUs=%d\n", omp_get_num_procs());
   printf("Number of max threads=%d\n", omp_get_max_threads());
   printf("Number of executing thread=%d\n", omp_get_thread_num());
diff --git a/tests/device/openmp/jni/openmp2.c b/tests/device/openmp/jni/openmp2.c
index 1e05817..aa5575d 100644
--- a/tests/device/openmp/jni/openmp2.c
+++ b/tests/device/openmp/jni/openmp2.c
@@ -6,21 +6,21 @@
 int main (int argc, char *argv[])
 {
     int nthreads, tid;
-    printf("SC_NPROCESSORS_ONLN: %ld\n", sysconf (_SC_NPROCESSORS_ONLN));
-    printf("SC_NPROCESSORS_CONF: %ld\n", sysconf (_SC_NPROCESSORS_CONF));
+    printf("SC_NPROCESSORS_ONLN: %d\n", sysconf (_SC_NPROCESSORS_ONLN));
+    printf("SC_NPROCESSORS_CONF: %d\n", sysconf (_SC_NPROCESSORS_CONF));
   #pragma omp parallel default(shared) private(nthreads, tid)
     /* Fork a team of threads giving them their own copies of variables */
     {
       /* Obtain thread number */
         tid = omp_get_thread_num();
         printf("Hello World from thread = %d\n", tid);
-      /* Only main thread does this */
+      /* Only master thread does this */
         if (tid == 0)
         {
             nthreads = omp_get_num_threads();
             printf("Number of threads = %d\n", nthreads);
         }
-    }  /* All threads join main thread and disband */
+    }  /* All threads join master thread and disband */
 
   return 0;
 }
diff --git a/tests/device/polly/CMakeLists.txt b/tests/device/polly/CMakeLists.txt
deleted file mode 100644
index ef3d99e..0000000
--- a/tests/device/polly/CMakeLists.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-cmake_minimum_required(VERSION 3.17)
-project(polly_test)
-
-add_executable(polly_test jni/polly_test.cpp)
-target_compile_options(polly_test PRIVATE -mllvm -polly)
diff --git a/tests/device/polly/jni/Android.mk b/tests/device/polly/jni/Android.mk
deleted file mode 100644
index 9f1f6e6..0000000
--- a/tests/device/polly/jni/Android.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := polly_test
-LOCAL_SRC_FILES := polly_test.cpp
-LOCAL_CPPFLAGS := -mllvm -polly
-include $(BUILD_EXECUTABLE)
diff --git a/tests/device/polly/jni/polly_test.cpp b/tests/device/polly/jni/polly_test.cpp
deleted file mode 100644
index aa8a4ea..0000000
--- a/tests/device/polly/jni/polly_test.cpp
+++ /dev/null
@@ -1,3 +0,0 @@
-int main(int, char**) {
-  return 0;
-}
diff --git a/tests/device/rs-cpp-basic/jni/Android.mk b/tests/device/rs-cpp-basic/jni/Android.mk
new file mode 100644
index 0000000..8b5320a
--- /dev/null
+++ b/tests/device/rs-cpp-basic/jni/Android.mk
@@ -0,0 +1,27 @@
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# This is the shared library included by the JNI test app.
+#
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := rstest-compute
+LOCAL_SRC_FILES:= mono.rs compute.cpp
+LOCAL_LDLIBS := -llog
+LOCAL_STATIC_LIBRARIES := RScpp_static
+include $(BUILD_EXECUTABLE)
+
+$(call import-module,android/renderscript)
diff --git a/build/cmake/hooks/post/Android.cmake b/tests/device/rs-cpp-basic/jni/Application.mk
similarity index 77%
rename from build/cmake/hooks/post/Android.cmake
rename to tests/device/rs-cpp-basic/jni/Application.mk
index 523f4e8..b985731 100644
--- a/build/cmake/hooks/post/Android.cmake
+++ b/tests/device/rs-cpp-basic/jni/Application.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2016 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,5 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# This is a hook file that will be included by cmake at the end of
-# Modules/Platform/Android.cmake.
+APP_PLATFORM := android-19
+APP_STL := c++_static
diff --git a/tests/device/rs-cpp-basic/jni/compute.cpp b/tests/device/rs-cpp-basic/jni/compute.cpp
new file mode 100644
index 0000000..7ab7ffb
--- /dev/null
+++ b/tests/device/rs-cpp-basic/jni/compute.cpp
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if __clang__ && __has_include(<ftw.h>)
+// The deprecated headers don't have this.
+#define USE_FTW
+#include <ftw.h>
+#endif
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <string>
+
+#include "RenderScript.h"
+#include "ScriptC_mono.h"
+
+#ifdef USE_FTW
+int _remove_callback(const char* fpath, const struct stat* sb, int typeflag,
+                     struct FTW* ftwbuf) {
+  int rv = remove(fpath);
+  if (rv == -1) {
+    perror("remove");
+  }
+
+  return rv;
+}
+
+int remove_directory(const char* path) {
+  return nftw(path, _remove_callback, 64, FTW_DEPTH | FTW_PHYS);
+}
+#else
+int remove_directory(const char* path) {
+  std::string cmd("rm -r ");
+  cmd += path;
+  int rv = system(cmd.c_str());
+  if (rv == -1) {
+    perror("system");
+  }
+
+  return rv;
+}
+#endif  // USE_FTW
+
+class ScopedTempDir {
+ public:
+  ScopedTempDir(const std::string& base_temp_dir) : temp_dir_(base_temp_dir) {
+    temp_dir_ += "/rs-cache-XXXXXX";
+    if (mkdtemp(&temp_dir_[0]) == NULL) {
+      perror("mkdtemp");
+      abort();
+    }
+  }
+
+  ~ScopedTempDir() {
+      remove_directory(temp_dir_.c_str());
+  }
+
+  const std::string& path() const {
+      return temp_dir_;
+  }
+
+ private:
+  std::string temp_dir_;
+};
+
+int test_compute() {
+  bool failed = false;
+
+  {
+    sp<RS> rs = new RS();
+    printf("New RS %p\n", rs.get());
+
+    // only legitimate because this is a standalone executable
+    ScopedTempDir temp_dir("/data/local/tmp");
+    bool r = rs->init(temp_dir.path().c_str());
+    printf("Init returned %i\n", r);
+
+    sp<const Element> e = Element::RGBA_8888(rs);
+    printf("Element %p\n", e.get());
+
+    Type::Builder tb(rs, e);
+    tb.setX(128);
+    tb.setY(128);
+    sp<const Type> t = tb.create();
+    printf("Type %p\n", t.get());
+
+    sp<Allocation> a1 = Allocation::createSized(rs, e, 1000);
+    printf("Allocation %p\n", a1.get());
+
+    sp<Allocation> ain = Allocation::createTyped(rs, t);
+    sp<Allocation> aout = Allocation::createTyped(rs, t);
+    printf("Allocation %p %p\n", ain.get(), aout.get());
+
+    sp<ScriptC_mono> sc = new ScriptC_mono(rs);
+    printf("new script\n");
+
+    sc->set_alloc(a1);
+    sc->set_elem(e);
+    sc->set_type(t);
+    sc->set_script(sc);
+    sc->set_script(nullptr);
+    sp<const Sampler> samp = Sampler::CLAMP_NEAREST(rs);
+    sc->set_sampler(samp);
+
+    // We read back the status from the script-side via a "failed" allocation.
+    sp<const Element> failed_e = Element::BOOLEAN(rs);
+    Type::Builder failed_tb(rs, failed_e);
+    failed_tb.setX(1);
+    sp<const Type> failed_t = failed_tb.create();
+    sp<Allocation> failed_alloc = Allocation::createTyped(rs, failed_t);
+
+    failed_alloc->copy1DRangeFrom(0, failed_t->getCount(), &failed);
+    sc->bind_failed(failed_alloc);
+
+    uint32_t* buf = new uint32_t[t->getCount()];
+    for (uint32_t ct = 0; ct < t->getCount(); ct++) {
+      buf[ct] = ct | (ct << 16);
+    }
+    ain->copy1DRangeFrom(0, t->getCount(), buf);
+    delete[] buf;
+
+    sc->forEach_root(ain, aout);
+
+    sc->invoke_foo(99, 3.1f);
+    sc->set_g_f(39.9f);
+    sc->set_g_i(-14);
+    sc->invoke_foo(99, 3.1f);
+    printf("for each done\n");
+
+    sc->invoke_bar(47, -3, 'c', -7, 14, -8);
+
+    // Verify a simple kernel.
+    {
+      static const uint32_t xDim = 7;
+      static const uint32_t yDim = 7;
+      sp<const Element> e = Element::I32(rs);
+      Type::Builder tb(rs, e);
+      tb.setX(xDim);
+      tb.setY(yDim);
+      sp<const Type> t = tb.create();
+      sp<Allocation> kern1_in = Allocation::createTyped(rs, t);
+      sp<Allocation> kern1_out = Allocation::createTyped(rs, t);
+
+      int* buf = new int[t->getCount()];
+      for (uint32_t ct = 0; ct < t->getCount(); ct++) {
+        buf[ct] = 5;
+      }
+      kern1_in->copy2DRangeFrom(0, 0, xDim, yDim, buf);
+      delete[] buf;
+
+      sc->forEach_kern1(kern1_in, kern1_out);
+      sc->forEach_verify_kern1(kern1_out);
+
+      rs->finish();
+      failed_alloc->copy1DTo(&failed);
+    }
+  }
+
+  return failed;
+}
+
+int main() {
+  bool failed = test_compute();
+
+  if (failed) {
+    printf("TEST FAILED!\n");
+  } else {
+    printf("TEST PASSED!\n");
+  }
+
+  return failed;
+}
diff --git a/tests/device/rs-cpp-basic/jni/mono.rs b/tests/device/rs-cpp-basic/jni/mono.rs
new file mode 100644
index 0000000..d4a718d
--- /dev/null
+++ b/tests/device/rs-cpp-basic/jni/mono.rs
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+int g_i = 4;
+
+float g_f = 5.9;
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+bool *failed;
+
+#define _RS_ASSERT(b) \
+do { \
+    if (!(b)) { \
+        *failed = true; \
+        rsDebug(#b " FAILED", 0); \
+    } \
+\
+} while (0)
+
+struct myStruct {
+    int i;
+    int j;
+    float f;
+    char c[3];
+};
+
+rs_allocation alloc;
+rs_element elem;
+rs_type type;
+rs_sampler sampler;
+rs_script script;
+
+void root(const uchar4 *v_in, uchar4 *v_out) {
+    float4 f4 = rsUnpackColor8888(*v_in);
+
+    float3 mono = dot(f4.rgb, gMonoMult);
+    *v_out = rsPackColorTo8888(mono);
+}
+
+void foo(int i, float f) {
+    rsDebug("g_i", g_i);
+    rsDebug("g_f", g_f);
+    rsDebug("i", i);
+    rsDebug("f", f);
+}
+
+void bar(int i, int j, char k, int l, int m, int n) {
+    _RS_ASSERT(i == 47);
+    _RS_ASSERT(j == -3);
+    _RS_ASSERT(k == 'c');
+    _RS_ASSERT(l == -7);
+    _RS_ASSERT(m == 14);
+    _RS_ASSERT(n == -8);
+}
+
+int RS_KERNEL kern1(int i, uint32_t x, uint32_t y) {
+    return i + 10 * x + 100 *y;
+}
+
+void RS_KERNEL verify_kern1(int i, uint32_t x, uint32_t y) {
+    _RS_ASSERT(i == (5 + 10 * x + 100 * y));
+    rsDebug("i ", i);
+}
+
diff --git a/tests/device/static-executable-exceptions/jni/Application.mk b/tests/device/static-executable-exceptions/jni/Application.mk
index 9ec531a..2133d20 100644
--- a/tests/device/static-executable-exceptions/jni/Application.mk
+++ b/tests/device/static-executable-exceptions/jni/Application.mk
@@ -1 +1 @@
-APP_PLATFORM := latest
+APP_PLATFORM := android-21
diff --git a/tests/device/static-executable-exceptions/test_config.py b/tests/device/static-executable-exceptions/test_config.py
index 59c5cab..41de161 100644
--- a/tests/device/static-executable-exceptions/test_config.py
+++ b/tests/device/static-executable-exceptions/test_config.py
@@ -1,13 +1,7 @@
-import ndk.abis
-from ndk.test.buildtest.case import Test
+def build_unsupported(test):
+    # Static executables with libc++ require targeting a new enough API level
+    # to not need libandroid_support.
+    if test.config.api < 21:
+        return f'android-{test.config.api}'
 
-
-def extra_cmake_flags() -> list[str]:
-    # Required for static executables.
-    return ["-DANDROID_PLATFORM=latest"]
-
-
-def override_runtime_minsdkversion(test: Test) -> int | None:
-    # We build as latest because static executables require that, but static executables
-    # are compatible with old OS versions.
-    return ndk.abis.min_api_for_abi(test.config.abi)
+    return None
diff --git a/tests/device/static-executable/jni/Application.mk b/tests/device/static-executable/jni/Application.mk
deleted file mode 100644
index 9ec531a..0000000
--- a/tests/device/static-executable/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_PLATFORM := latest
diff --git a/tests/device/static-executable/test_config.py b/tests/device/static-executable/test_config.py
index 59c5cab..afd4ef2 100644
--- a/tests/device/static-executable/test_config.py
+++ b/tests/device/static-executable/test_config.py
@@ -1,13 +1,5 @@
-import ndk.abis
-from ndk.test.buildtest.case import Test
-
-
-def extra_cmake_flags() -> list[str]:
-    # Required for static executables.
-    return ["-DANDROID_PLATFORM=latest"]
-
-
-def override_runtime_minsdkversion(test: Test) -> int | None:
-    # We build as latest because static executables require that, but static executables
-    # are compatible with old OS versions.
-    return ndk.abis.min_api_for_abi(test.config.abi)
+def extra_cmake_flags():
+    # Match the ndk-build test. Using libc++ here would require us to target a
+    # newer API level since static executables and libandroid_support don't
+    # mix.
+    return ['-DANDROID_STL=system']
diff --git a/tests/device/static_exe_lto/jni/Android.mk b/tests/device/static_exe_lto/jni/Android.mk
deleted file mode 100644
index 1e3e0d3..0000000
--- a/tests/device/static_exe_lto/jni/Android.mk
+++ /dev/null
@@ -1,8 +0,0 @@
-# Regression test for https://github.com/android/ndk/issues/1461.
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-LOCAL_LDFLAGS := -static -flto
-include $(BUILD_EXECUTABLE)
diff --git a/tests/device/static_exe_lto/jni/foo.cpp b/tests/device/static_exe_lto/jni/foo.cpp
deleted file mode 100644
index 5b7bef5..0000000
--- a/tests/device/static_exe_lto/jni/foo.cpp
+++ /dev/null
@@ -1,21 +0,0 @@
-#include <stdio.h>
-
-static bool global_ctor_called = false;
-
-struct SideEffectClass {
-  SideEffectClass() {
-    global_ctor_called = true;
-  }
-};
-
-static SideEffectClass global{};
-
-int main(int, char**) {
-  // Regression test for https://github.com/android/ndk/issues/1461. Without the
-  // fix, the global constructor will not have been called.
-  if (!global_ctor_called) {
-    fprintf(stderr, "Global constructor was not called before main\n");
-    return 1;
-  }
-  return 0;
-}
\ No newline at end of file
diff --git a/tests/device/test-cpufeatures/jni/test_android_setCpu_1.c b/tests/device/test-cpufeatures/jni/test_android_setCpu_1.c
index a226fee..cb51a5c 100644
--- a/tests/device/test-cpufeatures/jni/test_android_setCpu_1.c
+++ b/tests/device/test-cpufeatures/jni/test_android_setCpu_1.c
@@ -1,4 +1,3 @@
-#include <inttypes.h>
 #include <stdio.h>
 #include <stdlib.h>
 
@@ -17,7 +16,7 @@
   // and that android_getCpuCount() and android_getCpuFeatures()
   // will return the corresponding values.
   //
-  printf("Setting cpu_count=%d, features=%08" PRIx64 "\n",
+  printf("Setting cpu_count=%d, features=%08llx\n",
          cpu_count,
          cpu_features);
   if (!android_setCpu(cpu_count, cpu_features))
@@ -26,7 +25,7 @@
   count = android_getCpuCount();
   features = android_getCpuFeatures();
 
-  printf("Retrieved cpu_count=%d, features=%08" PRIx64 "\n",
+  printf("Retrieved cpu_count=%d, features=%08llx\n",
          count, features);
 
   if (count != cpu_count)
@@ -43,3 +42,4 @@
   printf("Second call to android_setCpu() failed as expected.\n");
   return 0;
 }
+
diff --git a/tests/device/test-cpufeatures/jni/test_android_setCpu_2.c b/tests/device/test-cpufeatures/jni/test_android_setCpu_2.c
index a91695e..2466066 100644
--- a/tests/device/test-cpufeatures/jni/test_android_setCpu_2.c
+++ b/tests/device/test-cpufeatures/jni/test_android_setCpu_2.c
@@ -1,4 +1,3 @@
-#include <inttypes.h>
 #include <stdio.h>
 #include <stdlib.h>
 
@@ -16,13 +15,13 @@
   count = android_getCpuCount();
   features = android_getCpuFeatures();
 
-  printf("Retrieved cpu_count=%d, features=%08" PRIx64 "\n",
+  printf("Retrieved cpu_count=%d, features=%08llx\n",
          count, features);
 
   // Check that android_setCpu() will fail when it is called after
   // android_getCpuCount / android_getCpuFeatures.
   //
-  printf("Trying to set cpu_count=%d, features=%08" PRIx64 "\n",
+  printf("Trying to set cpu_count=%d, features=%08llx\n",
          cpu_count,
          cpu_features);
 
diff --git a/tests/device/test-cpufeatures/jni/test_arm_idiv.c b/tests/device/test-cpufeatures/jni/test_arm_idiv.c
index 7f64e5a..bc115f2 100644
--- a/tests/device/test-cpufeatures/jni/test_arm_idiv.c
+++ b/tests/device/test-cpufeatures/jni/test_arm_idiv.c
@@ -13,13 +13,11 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-#include <memory.h>
+#include <cpu-features.h>
 #include <setjmp.h>
 #include <stdio.h>
 #include <signal.h>
 
-#include <cpu-features.h>
-
 #ifndef __arm__
 #error "Only compile this file for an ARM target"
 #endif
diff --git a/tests/device/test-cpufeatures/jni/test_cpufeatures.c b/tests/device/test-cpufeatures/jni/test_cpufeatures.c
index 9a0b251..23036fa 100644
--- a/tests/device/test-cpufeatures/jni/test_cpufeatures.c
+++ b/tests/device/test-cpufeatures/jni/test_cpufeatures.c
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+#define __STDC_FORMAT_MACROS 1
 
 #include <cpu-features.h>
 #include <inttypes.h>
@@ -21,20 +22,27 @@
 int main(void)
 {
     AndroidCpuFamily family = android_getCpuFamily();
-#if defined(__arm__)
-    if (family != ANDROID_CPU_FAMILY_ARM) {
-#elif defined(__aarch64__)
-    if (family != ANDROID_CPU_FAMILY_ARM64) {
-#elif defined(__i386__)
-    if (family != ANDROID_CPU_FAMILY_X86) {
-#elif defined(__x86_64__)
-    if (family != ANDROID_CPU_FAMILY_X86_64) {
-#elif defined(__riscv) && __riscv_xlen == 64
-    if (family != ANDROID_CPU_FAMILY_RISCV64) {
-#else
-    {
-#endif
-        fprintf(stderr, "Unsupported/incorrect CPU family: %d\n", family);
+    switch (family) {
+    case ANDROID_CPU_FAMILY_ARM:
+        printf("CPU family is ARM\n");
+        break;
+    case ANDROID_CPU_FAMILY_X86:
+        printf("CPU family is x86\n");
+        break;
+    case ANDROID_CPU_FAMILY_MIPS:
+        printf("CPU family is MIPS\n");
+        break;
+    case ANDROID_CPU_FAMILY_ARM64:
+        printf("CPU family is ARM64\n");
+        break;
+    case ANDROID_CPU_FAMILY_X86_64:
+        printf("CPU family is x86_64\n");
+        break;
+    case ANDROID_CPU_FAMILY_MIPS64:
+        printf("CPU family is MIPS64\n");
+        break;
+    default:
+        fprintf(stderr, "Unsupported CPU family: %d\n", family);
         return 1;
     }
 
@@ -84,6 +92,12 @@
         CHECK(X86, SSE4_1)
         CHECK(X86, SSE4_2)
         break;
+    case ANDROID_CPU_FAMILY_MIPS:
+    case ANDROID_CPU_FAMILY_MIPS64:
+        printf( "Supported MIPS features:\n");
+        CHECK(MIPS, R6)
+        CHECK(MIPS, MSA)
+        break;
     default:
         if (features != 0) {
             printf("ERROR: Unexpected CPU features mask: %016" PRIX64 "\n",
diff --git a/tests/device/thread_local_dlclose/CMakeLists.txt b/tests/device/thread_local_dlclose/CMakeLists.txt
deleted file mode 100644
index 12bba47..0000000
--- a/tests/device/thread_local_dlclose/CMakeLists.txt
+++ /dev/null
@@ -1,11 +0,0 @@
-cmake_minimum_required(VERSION 3.17.0)
-project(thread_local_dlclose)
-
-add_library(testlib SHARED jni/testlib.cpp)
-
-target_link_options(testlib
-  PRIVATE
-    -Wl,--version-script,${CMAKE_SOURCE_DIR}/jni/libtestlib.map.txt
-)
-
-add_executable(foo jni/foo.cpp)
diff --git a/tests/device/thread_local_dlclose/jni/Android.mk b/tests/device/thread_local_dlclose/jni/Android.mk
deleted file mode 100644
index affe2e9..0000000
--- a/tests/device/thread_local_dlclose/jni/Android.mk
+++ /dev/null
@@ -1,13 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := testlib
-LOCAL_SRC_FILES := testlib.cpp
-# Using a version script to ensure that the static libc++ is not re-exposed.
-LOCAL_LDFLAGS := -Wl,--version-script,$(LOCAL_PATH)/libtestlib.map.txt
-include $(BUILD_SHARED_LIBRARY)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := foo
-LOCAL_SRC_FILES := foo.cpp
-include $(BUILD_EXECUTABLE)
diff --git a/tests/device/thread_local_dlclose/jni/Application.mk b/tests/device/thread_local_dlclose/jni/Application.mk
deleted file mode 100644
index 1fc3c3d..0000000
--- a/tests/device/thread_local_dlclose/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_STL := c++_static
-APP_CPPFLAGS := -fexceptions -frtti
diff --git a/tests/device/thread_local_dlclose/jni/foo.cpp b/tests/device/thread_local_dlclose/jni/foo.cpp
deleted file mode 100644
index cdc0e7e..0000000
--- a/tests/device/thread_local_dlclose/jni/foo.cpp
+++ /dev/null
@@ -1,15 +0,0 @@
-#include <dlfcn.h>
-
-#include <thread>
-
-void myThread() {
-  void* lib = dlopen("./libtestlib.so", RTLD_LAZY);
-  auto func = reinterpret_cast<void (*)()>(dlsym(lib, "func"));
-  func();
-  dlclose(lib);
-}
-
-int main(int, char**) {
-  std::thread t(myThread);
-  t.join();
-}
diff --git a/tests/device/thread_local_dlclose/jni/libtestlib.map.txt b/tests/device/thread_local_dlclose/jni/libtestlib.map.txt
deleted file mode 100644
index 775be21..0000000
--- a/tests/device/thread_local_dlclose/jni/libtestlib.map.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-VERSION_1 {
-  global:
-    func;
-  local:
-    *;
-};
diff --git a/tests/device/thread_local_dlclose/jni/testlib.cpp b/tests/device/thread_local_dlclose/jni/testlib.cpp
deleted file mode 100644
index 844c80f..0000000
--- a/tests/device/thread_local_dlclose/jni/testlib.cpp
+++ /dev/null
@@ -1,9 +0,0 @@
-#include <iostream>
-
-extern "C" void func() {
-  try {
-    throw 0;
-  } catch (...) {
-    std::cerr << "Caught" << std::endl;
-  }
-}
diff --git a/tests/device/trivial_exception_catch/CMakeLists.txt b/tests/device/trivial_exception_catch/CMakeLists.txt
deleted file mode 100644
index 99da078..0000000
--- a/tests/device/trivial_exception_catch/CMakeLists.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(TrivialExceptionCatch CXX)
-
-add_executable(throw jni/throw.cpp)
diff --git a/tests/device/trivial_exception_catch/jni/Android.mk b/tests/device/trivial_exception_catch/jni/Android.mk
deleted file mode 100644
index 6525b9a..0000000
--- a/tests/device/trivial_exception_catch/jni/Android.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := throw
-LOCAL_SRC_FILES := throw.cpp
-include $(BUILD_EXECUTABLE)
diff --git a/tests/device/trivial_exception_catch/jni/Application.mk b/tests/device/trivial_exception_catch/jni/Application.mk
deleted file mode 100644
index 69416f4..0000000
--- a/tests/device/trivial_exception_catch/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_STL := c++_shared
-APP_CPPFLAGS := -fexceptions -frtti
diff --git a/tests/device/trivial_exception_catch/jni/throw.cpp b/tests/device/trivial_exception_catch/jni/throw.cpp
deleted file mode 100644
index 7547948..0000000
--- a/tests/device/trivial_exception_catch/jni/throw.cpp
+++ /dev/null
@@ -1,18 +0,0 @@
-// Regression test for https://github.com/android/ndk/issues/1769.
-#include <stdexcept>
-#include <string>
-#include <iostream>
-
-void f() {
-  std::string s = "test";
-  std::cout << s << std::endl;
-  throw std::runtime_error("Test");
-}
-
-
-int main() {
-  try {
-    f();
-  } catch(const std::exception&) {
-  }
-}
diff --git a/tests/device/tsan_smoke/jni/Android.mk b/tests/device/tsan_smoke/jni/Android.mk
deleted file mode 100644
index 6b27e40..0000000
--- a/tests/device/tsan_smoke/jni/Android.mk
+++ /dev/null
@@ -1,12 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := tsan_smoke
-LOCAL_CPP_EXTENSION := .cc
-LOCAL_SRC_FILES := tsan_tiny_race_test.cc
-LOCAL_CFLAGS := -fsanitize=thread
-LOCAL_LDFLAGS := -fsanitize=thread
-LOCAL_STATIC_LIBRARIES := googletest_main
-include $(BUILD_EXECUTABLE)
-
-$(call import-module,third_party/googletest)
diff --git a/tests/device/tsan_smoke/jni/Application.mk b/tests/device/tsan_smoke/jni/Application.mk
deleted file mode 100644
index 331ab0a..0000000
--- a/tests/device/tsan_smoke/jni/Application.mk
+++ /dev/null
@@ -1,3 +0,0 @@
-APP_STL := c++_shared
-APP_PLATFORM := android-24
-APP_STRIP_MODE := none
diff --git a/tests/device/tsan_smoke/jni/tsan_tiny_race_test.cc b/tests/device/tsan_smoke/jni/tsan_tiny_race_test.cc
deleted file mode 100644
index 3d3b345..0000000
--- a/tests/device/tsan_smoke/jni/tsan_tiny_race_test.cc
+++ /dev/null
@@ -1,29 +0,0 @@
-//===----------------------------------------------------------------------===//
-//
-// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
-// See https://llvm.org/LICENSE.txt for license information.
-// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
-//
-//===----------------------------------------------------------------------===//
-#include <pthread.h>
-#include <gtest/gtest.h>
-
-int Global;
-void *Thread1(void *x) {
-  Global = 42;
-  return x;
-}
-int RaceTest() {
-  pthread_t t;
-  pthread_create(&t, NULL, Thread1, NULL);
-  Global = 43;
-  pthread_join(t, NULL);
-  return Global;
-}
-
-TEST(tsan_smoke, RaceTest) {
-  ASSERT_DEATH(RaceTest(),
-               "tsan_smoke/jni/tsan_tiny_race_test.cc:*.: "
-               "virtual void assert_DeathTest_assert_false_Test::TestBody\\(\\): "
-               "assertion \"false\" failed");
-}
diff --git a/tests/device/tsan_smoke/test_config.py b/tests/device/tsan_smoke/test_config.py
deleted file mode 100644
index d31cf01..0000000
--- a/tests/device/tsan_smoke/test_config.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from ndk.test.devices import Device
-from ndk.test.devicetest.case import TestCase
-
-
-def build_unsupported(test):
-    # TODO(https://github.com/google/android-riscv64/issues/104): Add TSAN when it
-    # builds for RISCV64.
-    if test.config.is_lp32 or test.config.abi == "riscv64":
-        return test.config.abi
-    return None
-
-
-def run_unsupported(test: TestCase, device: Device) -> str | None:
-    return "runs indefinitely with latest clang"
-
-
-def run_broken(test, device):
-    return "all", "https://github.com/android/ndk/issues/1171"
diff --git a/tests/device/weak_symbols/jni/Android.mk b/tests/device/weak_symbols/jni/Android.mk
deleted file mode 100644
index 2c650ad..0000000
--- a/tests/device/weak_symbols/jni/Android.mk
+++ /dev/null
@@ -1,19 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := unconditional
-LOCAL_SRC_FILES := unconditional.cpp
-LOCAL_STATIC_LIBRARIES := googletest_main
-LOCAL_CFLAGS := -D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__
-LOCAL_LDLIBS := -landroid
-include $(BUILD_EXECUTABLE)
-
-include $(CLEAR_VARS)
-LOCAL_MODULE := builtin_available
-LOCAL_SRC_FILES := builtin_available.cpp
-LOCAL_STATIC_LIBRARIES := googletest_main
-LOCAL_CFLAGS := -D__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__
-LOCAL_LDLIBS := -landroid
-include $(BUILD_EXECUTABLE)
-
-$(call import-module,third_party/googletest)
\ No newline at end of file
diff --git a/tests/device/weak_symbols/jni/Application.mk b/tests/device/weak_symbols/jni/Application.mk
deleted file mode 100644
index 067c76f..0000000
--- a/tests/device/weak_symbols/jni/Application.mk
+++ /dev/null
@@ -1 +0,0 @@
-APP_STL := c++_static
\ No newline at end of file
diff --git a/tests/device/weak_symbols/jni/builtin_available.cpp b/tests/device/weak_symbols/jni/builtin_available.cpp
deleted file mode 100644
index 49ebf71..0000000
--- a/tests/device/weak_symbols/jni/builtin_available.cpp
+++ /dev/null
@@ -1,14 +0,0 @@
-#include <gtest/gtest.h>
-#include <android/trace.h>
-#include <android/api-level.h>
-
-TEST(weak_symbols, weak_symbol_enable) {
-     bool called = false;
-     if (__builtin_available(android 29, *)) {
-       // 0 is an arbitrary cookie. The specific value doesn't matter because
-       // this will never run concurrently.
-       ATrace_beginAsyncSection("ndk::asyncBeginEndSection", 0);
-       called = true;
-     }
-     ASSERT_EQ(android_get_device_api_level() >= 29, called);
-}
\ No newline at end of file
diff --git a/tests/device/weak_symbols/jni/unconditional.cpp b/tests/device/weak_symbols/jni/unconditional.cpp
deleted file mode 100644
index fb03e98..0000000
--- a/tests/device/weak_symbols/jni/unconditional.cpp
+++ /dev/null
@@ -1,23 +0,0 @@
-#include <gtest/gtest.h>
-#include <android/trace.h>
-#include <android/api-level.h>
-
-TEST(weak_symbols, crash_if_call_unavailable) {
-   if (android_get_device_api_level() >= 29) {
-     GTEST_SKIP() << "Test only valid for post-API 29 devices";
-   }
-   // 4770 is a cookie example from
-   // http://cs/android/cts/hostsidetests/atrace/AtraceTestApp/jni/CtsTrace.cpp;l=30;rcl=214cc4d8356fdb1ba4a63ae5baf86c6d76074233
-   ASSERT_DEATH(ATrace_beginAsyncSection("ndk::asyncBeginEndSection", 4770), "");
-}
-
-TEST(weak_symbols, pass_if_call_available) {
-   if (android_get_device_api_level() < 29) {
-     GTEST_SKIP() << "Test not valid for pre-API 29 devices";
-   }
-   // 4770 is a cookie example from
-   // http://cs/android/cts/hostsidetests/atrace/AtraceTestApp/jni/CtsTrace.cpp;l=30;rcl=214cc4d8356fdb1ba4a63ae5baf86c6d76074233
-   ATrace_beginAsyncSection("ndk::asyncBeginEndSection", 4770);
-}
-
-
diff --git a/tests/device/weak_symbols/test_config.py b/tests/device/weak_symbols/test_config.py
deleted file mode 100644
index d3387f6..0000000
--- a/tests/device/weak_symbols/test_config.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from ndk.test.spec import WeakSymbolsConfig
-
-
-def build_unsupported(test) -> bool:
-    # skip this test to avoid redefining __ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__
-    if test.config.weak_symbol == WeakSymbolsConfig.WeakAPI:
-        return test.config.weak_symbol
-    return None
diff --git a/tests/device/weak_symbols_build_support/CMakeLists.txt b/tests/device/weak_symbols_build_support/CMakeLists.txt
deleted file mode 100644
index 5c89356..0000000
--- a/tests/device/weak_symbols_build_support/CMakeLists.txt
+++ /dev/null
@@ -1,12 +0,0 @@
-cmake_minimum_required(VERSION 3.22.1)
-project(WeakSymbolsBuildSupport CXX)
-
-set(GTEST_PATH "${ANDROID_NDK}/sources/third_party/googletest")
-
-add_executable(weak_symbols
-    jni/weak_symbols.cpp
-    ${GTEST_PATH}/src/gtest-all.cc
-    ${GTEST_PATH}/src/gtest_main.cc
-)
-target_include_directories(weak_symbols PRIVATE ${GTEST_PATH}/include ${GTEST_PATH})
-target_link_libraries(weak_symbols PRIVATE -landroid)
\ No newline at end of file
diff --git a/tests/device/weak_symbols_build_support/jni/Application.mk b/tests/device/weak_symbols_build_support/jni/Application.mk
deleted file mode 100644
index 44e2196..0000000
--- a/tests/device/weak_symbols_build_support/jni/Application.mk
+++ /dev/null
@@ -1,2 +0,0 @@
-APP_STL := c++_static
-APP_WEAK_API_DEFS := true
\ No newline at end of file
diff --git a/tests/device/weak_symbols_build_support/jni/weak_symbols.cpp b/tests/device/weak_symbols_build_support/jni/weak_symbols.cpp
deleted file mode 100644
index 49ebf71..0000000
--- a/tests/device/weak_symbols_build_support/jni/weak_symbols.cpp
+++ /dev/null
@@ -1,14 +0,0 @@
-#include <gtest/gtest.h>
-#include <android/trace.h>
-#include <android/api-level.h>
-
-TEST(weak_symbols, weak_symbol_enable) {
-     bool called = false;
-     if (__builtin_available(android 29, *)) {
-       // 0 is an arbitrary cookie. The specific value doesn't matter because
-       // this will never run concurrently.
-       ATrace_beginAsyncSection("ndk::asyncBeginEndSection", 0);
-       called = true;
-     }
-     ASSERT_EQ(android_get_device_api_level() >= 29, called);
-}
\ No newline at end of file
diff --git a/tests/device/weak_symbols_build_support/test_config.py b/tests/device/weak_symbols_build_support/test_config.py
deleted file mode 100644
index 719e372..0000000
--- a/tests/device/weak_symbols_build_support/test_config.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from ndk.test.spec import WeakSymbolsConfig
-
-
-def extra_cmake_flags() -> list[str]:
-    return ["-DANDROID_WEAK_API_DEFS=ON"]
-
-
-def build_unsupported(test) -> bool:
-    # skip this test to avoid redefining __ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__
-    if test.config.weak_symbol == WeakSymbolsConfig.WeakAPI:
-        return test.config.weak_symbol
-    return None
diff --git a/tests/device/yasm/test_config.py b/tests/device/yasm/test_config.py
index 781dbb2..fb2f15d 100644
--- a/tests/device/yasm/test_config.py
+++ b/tests/device/yasm/test_config.py
@@ -1,4 +1,4 @@
 def build_unsupported(test):
-    if test.config.abi not in ("x86", "x86_64"):
+    if test.config.abi not in ('x86', 'x86_64'):
         return test.config.abi
     return None
diff --git a/tests/libc++/test/README.md b/tests/libc++/test/README.md
new file mode 100644
index 0000000..39eacb2
--- /dev/null
+++ b/tests/libc++/test/README.md
@@ -0,0 +1,2 @@
+This is a fake test directory so we can add the libc++ test runner to the NDK
+test harness. The real tests are contained in the libc++ packaged in the NDK.
diff --git a/tests/libc++/test/libcxx/debug/test_config.py b/tests/libc++/test/libcxx/debug/test_config.py
new file mode 100644
index 0000000..eead71f
--- /dev/null
+++ b/tests/libc++/test/libcxx/debug/test_config.py
@@ -0,0 +1,5 @@
+def run_unsupported(test, device):
+    # Can't replace SIGABRT on old releases.
+    if device.version < 21 and test.case_name == 'debug_abort.pass':
+        return device.version
+    return None
diff --git a/tests/libc++/test/libcxx/strings/c.strings/test_config.py b/tests/libc++/test/libcxx/strings/c.strings/test_config.py
new file mode 100644
index 0000000..4b649b6
--- /dev/null
+++ b/tests/libc++/test/libcxx/strings/c.strings/test_config.py
@@ -0,0 +1,4 @@
+def build_broken(test):
+    if test.case_name == 'version_cuchar.pass':
+        return 'all', 'http://b/63679176'
+    return None, None
diff --git a/tests/libc++/test/std/depr/depr.c.headers/test_config.py b/tests/libc++/test/std/depr/depr.c.headers/test_config.py
new file mode 100644
index 0000000..a66bc5d
--- /dev/null
+++ b/tests/libc++/test/std/depr/depr.c.headers/test_config.py
@@ -0,0 +1,6 @@
+def build_broken(test):
+    if test.case_name == 'math_h_isnan.pass':
+        return 'all', 'http://b/34724220'
+    if test.case_name == 'math_h_isinf.pass' and test.config.api >= 21:
+        return f'android-{test.config.api}', 'http://b/34724220'
+    return None, None
diff --git a/tests/libc++/test/std/input.output/stream.buffers/streambuf/streambuf.protected/streambuf.put.area/test_config.py b/tests/libc++/test/std/input.output/stream.buffers/streambuf/streambuf.protected/streambuf.put.area/test_config.py
new file mode 100644
index 0000000..61d60d8
--- /dev/null
+++ b/tests/libc++/test/std/input.output/stream.buffers/streambuf/streambuf.protected/streambuf.put.area/test_config.py
@@ -0,0 +1,6 @@
+def run_unsupported(test, _device):
+    if test.case_name == 'pbump2gig.pass':
+        # This test attempts to allocate 2GiB of 'a', which doesn't work on a
+        # mobile device.
+        return 'all'
+    return None
diff --git a/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.array/test_config.py b/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.array/test_config.py
new file mode 100644
index 0000000..d7d5d17
--- /dev/null
+++ b/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.array/test_config.py
@@ -0,0 +1,7 @@
+def run_broken(test, device):
+    failing_tests = [
+        'new_array_nothrow_replace.pass',
+    ]
+    if test.case_name in failing_tests and device.version < 18:
+        return f'android-{device.version}', 'http://b/2643900'
+    return None, None
diff --git a/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.single/test_config.py b/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.single/test_config.py
new file mode 100644
index 0000000..35aeae8
--- /dev/null
+++ b/tests/libc++/test/std/language.support/support.dynamic/new.delete/new.delete.single/test_config.py
@@ -0,0 +1,4 @@
+def run_broken(test, device):
+    if test.case_name == 'new_nothrow_replace.pass' and device.version < 18:
+        return f'android-{device.version}', 'http://b/2643900'
+    return None, None
diff --git a/tests/libc++/test/std/language.support/support.start.term/test_config.py b/tests/libc++/test/std/language.support/support.start.term/test_config.py
new file mode 100644
index 0000000..81a7b0c
--- /dev/null
+++ b/tests/libc++/test/std/language.support/support.start.term/test_config.py
@@ -0,0 +1,4 @@
+def build_broken(test):
+    if test.case_name == 'quick_exit.pass' and test.config.api < 21:
+        return f'android-{test.config.api}', 'http://b/34719339'
+    return None, None
diff --git a/tests/libc++/test/std/localization/locale.categories/category.ctype/facet.ctype.special/facet.ctype.char.dtor/test_config.py b/tests/libc++/test/std/localization/locale.categories/category.ctype/facet.ctype.special/facet.ctype.char.dtor/test_config.py
new file mode 100644
index 0000000..2635d93
--- /dev/null
+++ b/tests/libc++/test/std/localization/locale.categories/category.ctype/facet.ctype.special/facet.ctype.char.dtor/test_config.py
@@ -0,0 +1,4 @@
+def run_broken(test, device):
+    if test.case_name == 'dtor.pass' and device.version < 18:
+        return f'android-{device.version}', 'http://b/2643900'
+    return None, None
diff --git a/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.nm.put/facet.num.put.members/test_config.py b/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.nm.put/facet.num.put.members/test_config.py
new file mode 100644
index 0000000..ad470aa
--- /dev/null
+++ b/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.nm.put/facet.num.put.members/test_config.py
@@ -0,0 +1,10 @@
+def run_broken(test, device):
+    is_lp64 = test.config.abi in ('arm64-v8a', 'x86_64')
+    if is_lp64 and test.case_name == 'put_long_double.pass':
+        return test.config.abi, 'http://b/34950416'
+    percent_f_tests = ('put_double.pass', 'put_long_double.pass')
+    if test.case_name in percent_f_tests and device.version < 21:
+        return f'android-{device.version}', 'http://b/35764716'
+    if test.case_name == 'put_long_double.pass':
+        return 'all?', 'http://b/63144639'
+    return None, None
diff --git a/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.num.get/facet.num.get.members/test_config.py b/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.num.get/facet.num.get.members/test_config.py
new file mode 100644
index 0000000..b30d18e
--- /dev/null
+++ b/tests/libc++/test/std/localization/locale.categories/category.numeric/locale.num.get/facet.num.get.members/test_config.py
@@ -0,0 +1,20 @@
+def run_broken(test, device):
+    is_lp64 = test.config.abi in ('arm64-v8a', 'x86_64')
+    failing_tests = (
+        'get_long_double.pass',
+    )
+    if is_lp64 and device.version < 26 and test.case_name in failing_tests:
+        return f'android-{device.version}', 'http://b/31101647'
+
+    if not is_lp64 and test.case_name == 'get_float.pass':
+        return test.config.abi, 'https://github.com/android-ndk/ndk/issues/415'
+
+    percent_a_tests = (
+        'get_double.pass',
+        'get_long_double.pass',
+    )
+    if test.case_name in percent_a_tests and device.version < 21:
+        bug = 'https://github.com/android-ndk/ndk/issues/437'
+        return f'android-{device.version}', bug
+
+    return None, None
diff --git a/tests/libc++/test/std/numerics/c.math/test_config.py b/tests/libc++/test/std/numerics/c.math/test_config.py
new file mode 100644
index 0000000..2dcd3fb
--- /dev/null
+++ b/tests/libc++/test/std/numerics/c.math/test_config.py
@@ -0,0 +1,6 @@
+def build_broken(test):
+    if test.case_name == 'cmath_isnan.pass':
+        return 'all', 'http://b/34724220'
+    if test.case_name == 'cmath_isinf.pass' and test.config.api >= 21:
+        return f'android-{test.config.api}', 'http://b/34724220'
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bernoulli/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bernoulli/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bernoulli/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bin/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bin/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.bin/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.geo/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.geo/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.geo/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.negbin/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.negbin/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.bern/rand.dist.bern.negbin/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.cauchy/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.cauchy/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.cauchy/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.chisq/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.chisq/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.chisq/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.f/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.f/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.f/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.lognormal/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.lognormal/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.lognormal/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.normal/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.normal/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.normal/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.t/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.t/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.t/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.exp/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.exp/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.exp/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.extreme/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.extreme/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.extreme/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.gamma/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.gamma/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.gamma/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.poisson/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.poisson/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.poisson/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.weibull/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.weibull/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.pois/rand.dist.pois.weibull/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.discrete/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.discrete/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.discrete/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.pconst/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.pconst/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.pconst/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.plinear/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.plinear/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.samp/rand.dist.samp.plinear/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.uni/rand.dist.uni.real/test_config.py b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.uni/rand.dist.uni.real/test_config.py
new file mode 100644
index 0000000..01e63c4
--- /dev/null
+++ b/tests/libc++/test/std/numerics/rand/rand.dis/rand.dist.uni/rand.dist.uni.real/test_config.py
@@ -0,0 +1,5 @@
+def run_broken(test, device):
+    if device.version < 21 and test.case_name == 'io.pass':
+        bug = 'https://issuetracker.google.com/36988114'
+        return f'android-{device.version}', bug
+    return None, None
diff --git a/tests/libc++/test/std/strings/c.strings/test_config.py b/tests/libc++/test/std/strings/c.strings/test_config.py
new file mode 100644
index 0000000..2647185
--- /dev/null
+++ b/tests/libc++/test/std/strings/c.strings/test_config.py
@@ -0,0 +1,4 @@
+def build_broken(test):
+    if test.case_name == 'cuchar.pass':
+        return 'all', 'http://b/63679176'
+    return None, None
diff --git a/tests/libc++/test/std/strings/string.conversions/test_config.py b/tests/libc++/test/std/strings/string.conversions/test_config.py
new file mode 100644
index 0000000..c5618f4
--- /dev/null
+++ b/tests/libc++/test/std/strings/string.conversions/test_config.py
@@ -0,0 +1,7 @@
+def run_broken(test, device):
+    is_lp64 = test.config.abi in ('arm64-v8a', 'x86_64')
+    if device.version < 26 and is_lp64 and test.case_name == 'stold.pass':
+        return f'android-{device.version}', 'http://b/31101647'
+    if not is_lp64 and test.case_name == 'stof.pass':
+        return 'all', 'http://b/34739876'
+    return None, None
diff --git a/tests/libc++/test/std/thread/thread.threads/thread.thread.class/thread.thread.constr/test_config.py b/tests/libc++/test/std/thread/thread.threads/thread.thread.class/thread.thread.constr/test_config.py
new file mode 100644
index 0000000..14d7bfe
--- /dev/null
+++ b/tests/libc++/test/std/thread/thread.threads/thread.thread.class/thread.thread.constr/test_config.py
@@ -0,0 +1,4 @@
+def run_broken(test, device):
+    if test.case_name == 'F.pass' and device.version < 18:
+        return f'android-{device.version}', 'http://b/2643900'
+    return None, None
diff --git a/tests/pytest/ndkstack/__init__.py b/tests/ndk-stack/__init__.py
similarity index 100%
rename from tests/pytest/ndkstack/__init__.py
rename to tests/ndk-stack/__init__.py
diff --git a/tests/pytest/ndkstack/files/backtrace.txt b/tests/ndk-stack/files/backtrace.txt
similarity index 100%
rename from tests/pytest/ndkstack/files/backtrace.txt
rename to tests/ndk-stack/files/backtrace.txt
diff --git a/tests/pytest/ndkstack/files/expected.txt b/tests/ndk-stack/files/expected.txt
similarity index 94%
rename from tests/pytest/ndkstack/files/expected.txt
rename to tests/ndk-stack/files/expected.txt
index 64e9002..bf96b9c 100644
--- a/tests/pytest/ndkstack/files/expected.txt
+++ b/tests/ndk-stack/files/expected.txt
@@ -1,9 +1,9 @@
 ********** Crash dump: **********
 #00 0x0000e4fc test.apk!libbase.so (offset 0x1000)
-                                    android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>>*)
+                                    android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >*)
                                     system/core/base/file.cpp:365:0
 #01 0x0000e4fc test.apk!libbase.so (offset 0x1000)
-                                    android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>> const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char>>*)
+                                    android::base::RemoveFileIfExists(std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> > const&, std::__1::basic_string<char, std::__1::char_traits<char>, std::__1::allocator<char> >*)
                                     system/core/base/file.cpp:365:0
 #02 0x00057250 test.apk!libc.so (offset 0x14000)
                                  __memcpy_a15
diff --git a/tests/pytest/ndkstack/files/expected_multiple.txt b/tests/ndk-stack/files/expected_multiple.txt
similarity index 100%
rename from tests/pytest/ndkstack/files/expected_multiple.txt
rename to tests/ndk-stack/files/expected_multiple.txt
diff --git a/tests/pytest/ndkstack/files/libbase.so b/tests/ndk-stack/files/libbase.so
similarity index 100%
rename from tests/pytest/ndkstack/files/libbase.so
rename to tests/ndk-stack/files/libbase.so
Binary files differ
diff --git a/tests/pytest/ndkstack/files/libc.so b/tests/ndk-stack/files/libc.so
similarity index 100%
rename from tests/pytest/ndkstack/files/libc.so
rename to tests/ndk-stack/files/libc.so
Binary files differ
diff --git a/tests/pytest/ndkstack/files/libc64.so b/tests/ndk-stack/files/libc64.so
similarity index 100%
rename from tests/pytest/ndkstack/files/libc64.so
rename to tests/ndk-stack/files/libc64.so
Binary files differ
diff --git a/tests/pytest/ndkstack/files/libutils.so b/tests/ndk-stack/files/libutils.so
similarity index 100%
rename from tests/pytest/ndkstack/files/libutils.so
rename to tests/ndk-stack/files/libutils.so
Binary files differ
diff --git a/tests/pytest/ndkstack/files/libziparchive.so b/tests/ndk-stack/files/libziparchive.so
similarity index 100%
rename from tests/pytest/ndkstack/files/libziparchive.so
rename to tests/ndk-stack/files/libziparchive.so
Binary files differ
diff --git a/tests/pytest/ndkstack/files/multiple.txt b/tests/ndk-stack/files/multiple.txt
similarity index 100%
rename from tests/pytest/ndkstack/files/multiple.txt
rename to tests/ndk-stack/files/multiple.txt
diff --git a/tests/pytest/ndkstack/files/test.apk b/tests/ndk-stack/files/test.apk
similarity index 100%
rename from tests/pytest/ndkstack/files/test.apk
rename to tests/ndk-stack/files/test.apk
Binary files differ
diff --git a/tests/ndk-stack/ndk_stack_systemtest.py b/tests/ndk-stack/ndk_stack_systemtest.py
new file mode 100755
index 0000000..7a091bb
--- /dev/null
+++ b/tests/ndk-stack/ndk_stack_systemtest.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""System tests for ndk-stack.py"""
+
+from __future__ import print_function
+
+from io import StringIO
+import os.path
+import sys
+import unittest
+from unittest.mock import patch
+
+sys.path.insert(0, '../..')
+ndk_stack = __import__('ndk-stack')
+
+import ndk.hosts  # pylint:disable=wrong-import-position
+import ndk.toolchains  # pylint:disable=wrong-import-position
+
+
+class SystemTests(unittest.TestCase):
+    """Complete system test of ndk-stack.py script."""
+
+    def setUp(self):
+        default_host = ndk.hosts.get_default_host()
+        clang_toolchain = ndk.toolchains.ClangToolchain(default_host)
+
+        # First try and use the normal functions, and if they fail, then
+        # use hard-coded paths from the development locations.
+        ndk_paths = ndk_stack.get_ndk_paths()
+        self.readelf = ndk_stack.find_readelf(*ndk_paths)
+        if not self.readelf:
+            self.readelf = clang_toolchain.gcc_toolchain.gcc_tool('readelf')
+        self.assertTrue(self.readelf)
+        self.assertTrue(os.path.exists(self.readelf))
+
+        try:
+            self.llvm_symbolizer = ndk_stack.find_llvm_symbolizer(*ndk_paths)
+        except OSError:
+            self.llvm_symbolizer = str(
+                clang_toolchain.clang_tool('llvm-symbolizer'))
+        self.assertTrue(self.llvm_symbolizer)
+        self.assertTrue(os.path.exists(self.llvm_symbolizer))
+
+    @patch.object(ndk_stack, 'find_llvm_symbolizer')
+    @patch.object(ndk_stack, 'find_readelf')
+    def system_test(self, backtrace_file, expected_file, mock_readelf, mock_llvm_symbolizer):
+        mock_readelf.return_value = self.readelf
+        mock_llvm_symbolizer.return_value = self.llvm_symbolizer
+
+        symbol_dir = os.path.join(
+            os.path.dirname(os.path.realpath(__file__)), 'files')
+        with patch('sys.stdout', new_callable=StringIO) as mock_stdout:
+            ndk_stack.main([
+                '-s', symbol_dir, '-i',
+                os.path.join(symbol_dir, backtrace_file)
+            ])
+
+        # Read the expected output.
+        file_name = os.path.join(symbol_dir, expected_file)
+        with open(mode='r', file=file_name) as exp_file:
+            expected = exp_file.read()
+        expected = expected.replace('SYMBOL_DIR', symbol_dir)
+        self.maxDiff = None
+        self.assertEqual(expected, mock_stdout.getvalue())
+
+    def test_all_stacks(self):
+        self.system_test('backtrace.txt', 'expected.txt')
+
+    def test_multiple_crashes(self):
+        self.system_test('multiple.txt', 'expected_multiple.txt')
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/tests/ndk-stack/ndk_stack_unittest.py b/tests/ndk-stack/ndk_stack_unittest.py
new file mode 100755
index 0000000..64c9dff
--- /dev/null
+++ b/tests/ndk-stack/ndk_stack_unittest.py
@@ -0,0 +1,466 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Unittests for ndk-stack.py"""
+
+from __future__ import print_function
+
+import os.path
+import sys
+import textwrap
+import unittest
+
+from unittest import mock
+from unittest.mock import patch
+
+try:
+    # Python 2
+    from cStringIO import StringIO
+except ModuleNotFoundError:  # pylint:disable=undefined-variable
+    # Python 3
+    from io import StringIO
+
+sys.path.insert(0, '../..')
+ndk_stack = __import__('ndk-stack')
+
+
+# The class-level patch injects a mock for os.path.exists as the last
+# positional argument of every test method in this class.
+@patch('os.path.exists')
+class PathTests(unittest.TestCase):
+    """Tests of find_llvm_symbolizer() and find_readelf()."""
+
+    def setUp(self):
+        # Fake (ndk_path, ndk_bin, arch_dir) triple matching the positional
+        # arguments of find_llvm_symbolizer()/find_readelf().
+        self.ndk_paths = ('/ndk_fake', '/ndk_fake/bin', 'linux-x86_64')
+        # On Windows the prebuilt executables carry a suffix.
+        exe_suffix = '.EXE' if os.name == 'nt' else ''
+        self.llvm_symbolizer = 'llvm-symbolizer' + exe_suffix
+        self.readelf = 'readelf' + exe_suffix
+
+    def test_find_llvm_symbolizer_in_prebuilt(self, mock_exists):
+        """llvm-symbolizer is found in the NDK prebuilt toolchain dir."""
+        expected_path = os.path.join('/ndk_fake', 'toolchains', 'llvm',
+                                     'prebuilt', 'linux-x86_64', 'bin',
+                                     self.llvm_symbolizer)
+        mock_exists.return_value = True
+        self.assertEqual(expected_path,
+                         ndk_stack.find_llvm_symbolizer(*self.ndk_paths))
+        # The prebuilt location must be the first (and only) path probed.
+        mock_exists.assert_called_once_with(expected_path)
+
+    def test_find_llvm_symbolizer_in_standalone_toolchain(self, mock_exists):
+        """Falls back to <ndk>/bin when the prebuilt path is missing."""
+        prebuilt_path = os.path.join('/ndk_fake', 'toolchains', 'llvm',
+                                     'prebuilt', 'linux-x86_64', 'bin',
+                                     self.llvm_symbolizer)
+        expected_path = os.path.join('/ndk_fake', 'bin', self.llvm_symbolizer)
+        # First probe (prebuilt) fails, second (standalone) succeeds.
+        mock_exists.side_effect = [False, True]
+        self.assertEqual(expected_path,
+                         ndk_stack.find_llvm_symbolizer(*self.ndk_paths))
+        mock_exists.assert_has_calls(
+            [mock.call(prebuilt_path),
+             mock.call(expected_path)])
+
+    def test_llvm_symbolizer_not_found(self, mock_exists):
+        """find_llvm_symbolizer() raises OSError when nothing exists."""
+        mock_exists.return_value = False
+        with self.assertRaises(OSError) as cm:
+            ndk_stack.find_llvm_symbolizer(*self.ndk_paths)
+        self.assertEqual('Unable to find llvm-symbolizer', str(cm.exception))
+
+    def test_find_readelf_in_prebuilt(self, mock_exists):
+        """readelf is found in the arch-specific prebuilt toolchain dir."""
+        expected_path = os.path.join(
+            '/ndk_fake', 'toolchains', 'llvm', 'prebuilt', 'linux-x86_64',
+            'x86_64-linux-android', 'bin', self.readelf)
+        mock_exists.return_value = True
+        self.assertEqual(expected_path,
+                         ndk_stack.find_readelf(*self.ndk_paths))
+        mock_exists.assert_called_once_with(expected_path)
+
+    def test_find_readelf_in_prebuilt_arm(self, mock_exists):
+        """The arm host dir maps to the arm-linux-androideabi triple."""
+        expected_path = os.path.join(
+            '/ndk_fake', 'toolchains', 'llvm', 'prebuilt', 'linux-arm',
+            'arm-linux-androideabi', 'bin', self.readelf)
+        mock_exists.return_value = True
+        self.assertEqual(
+            expected_path,
+            ndk_stack.find_readelf('/ndk_fake', '/ndk_fake/bin', 'linux-arm'))
+        mock_exists.assert_called_once_with(expected_path)
+
+    def test_find_readelf_in_standalone_toolchain(self, mock_exists):
+        """Each supported triple is probed under a standalone toolchain."""
+        for arch in [
+                'aarch64-linux-android', 'arm-linux-androideabi',
+                'i686-linux-android', 'x86_64-linux-android'
+        ]:
+            mock_exists.reset_mock()
+            expected_path = os.path.join('/ndk_fake', arch, 'bin',
+                                         self.readelf)
+            mock_exists.side_effect = [False, True]
+            # NOTE(review): this direct assignment replaces the class-level
+            # mock for the rest of this test, so the side_effect above is
+            # presumably never consumed; @patch still restores the real
+            # os.path.exists at teardown. Worth confirming/cleaning up.
+            os.path.exists = lambda path, exp=expected_path: path == exp
+            self.assertEqual(expected_path,
+                             ndk_stack.find_readelf(*self.ndk_paths))
+
+    def test_readelf_not_found(self, mock_exists):
+        """Unlike the symbolizer lookup, a missing readelf returns falsy
+        rather than raising (compare test_llvm_symbolizer_not_found)."""
+        mock_exists.return_value = False
+        self.assertFalse(ndk_stack.find_readelf(*self.ndk_paths))
+
+
+class FrameTests(unittest.TestCase):
+    """Test parsing of backtrace lines.
+
+    Each test feeds one '  #NN pc ADDR  ...' line to FrameInfo.from_line()
+    and checks which of the optional fields (offset, container file,
+    build id) were recognized.
+    """
+
+    def test_line_with_map_name(self):
+        """Plain frame: only a map/ELF path, no optional fields."""
+        line = '  #14 pc 00001000  /fake/libfake.so'
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        self.assertEqual('#14', frame_info.num)
+        self.assertEqual('00001000', frame_info.pc)
+        self.assertEqual('/fake/libfake.so', frame_info.tail)
+        self.assertEqual('/fake/libfake.so', frame_info.elf_file)
+        self.assertFalse(frame_info.offset)
+        self.assertFalse(frame_info.container_file)
+        self.assertFalse(frame_info.build_id)
+
+    def test_line_with_function(self):
+        """A trailing (func()) annotation stays in tail, elf_file is clean."""
+        line = '  #08 pc 00001040  /fake/libfake.so (func())'
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        self.assertEqual('#08', frame_info.num)
+        self.assertEqual('00001040', frame_info.pc)
+        self.assertEqual('/fake/libfake.so (func())', frame_info.tail)
+        self.assertEqual('/fake/libfake.so', frame_info.elf_file)
+        self.assertFalse(frame_info.offset)
+        self.assertFalse(frame_info.container_file)
+        self.assertFalse(frame_info.build_id)
+
+    def test_line_with_offset(self):
+        """An '(offset 0x...)' annotation is parsed into a numeric offset."""
+        line = '  #04 pc 00002050  /fake/libfake.so (offset 0x2000)'
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        self.assertEqual('#04', frame_info.num)
+        self.assertEqual('00002050', frame_info.pc)
+        self.assertEqual('/fake/libfake.so (offset 0x2000)', frame_info.tail)
+        self.assertEqual('/fake/libfake.so', frame_info.elf_file)
+        self.assertEqual(0x2000, frame_info.offset)
+        self.assertFalse(frame_info.container_file)
+        self.assertFalse(frame_info.build_id)
+
+    def test_line_with_build_id(self):
+        """A '(BuildId: ...)' annotation is captured as build_id."""
+        line = '  #03 pc 00002050  /fake/libfake.so (BuildId: d1d420a58366bf29f1312ec826f16564)'
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        self.assertEqual('#03', frame_info.num)
+        self.assertEqual('00002050', frame_info.pc)
+        self.assertEqual(
+            '/fake/libfake.so (BuildId: d1d420a58366bf29f1312ec826f16564)',
+            frame_info.tail)
+        self.assertEqual('/fake/libfake.so', frame_info.elf_file)
+        self.assertFalse(frame_info.offset)
+        self.assertFalse(frame_info.container_file)
+        self.assertEqual('d1d420a58366bf29f1312ec826f16564',
+                         frame_info.build_id)
+
+    def test_line_with_container_file(self):
+        """'apk!lib.so' splits into container_file and embedded elf_file."""
+        line = '  #10 pc 00003050  /fake/fake.apk!libc.so'
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        self.assertEqual('#10', frame_info.num)
+        self.assertEqual('00003050', frame_info.pc)
+        self.assertEqual('/fake/fake.apk!libc.so', frame_info.tail)
+        self.assertEqual('libc.so', frame_info.elf_file)
+        self.assertFalse(frame_info.offset)
+        self.assertEqual('/fake/fake.apk', frame_info.container_file)
+        self.assertFalse(frame_info.build_id)
+
+    def test_line_with_container_and_elf_equal(self):
+        """When container and embedded names match, no container is set."""
+        line = '  #12 pc 00004050  /fake/libc.so!lib/libc.so'
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        self.assertEqual('#12', frame_info.num)
+        self.assertEqual('00004050', frame_info.pc)
+        self.assertEqual('/fake/libc.so!lib/libc.so', frame_info.tail)
+        self.assertEqual('/fake/libc.so', frame_info.elf_file)
+        self.assertFalse(frame_info.offset)
+        self.assertFalse(frame_info.container_file)
+        self.assertFalse(frame_info.build_id)
+
+    def test_line_everything(self):
+        """All optional annotations present at once parse correctly."""
+        line = ('  #07 pc 00823fc  /fake/fake.apk!libc.so (__start_thread+64) '
+                '(offset 0x1000) (BuildId: 6a0c10d19d5bf39a5a78fa514371dab3)')
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        self.assertEqual('#07', frame_info.num)
+        self.assertEqual('00823fc', frame_info.pc)
+        self.assertEqual(
+            '/fake/fake.apk!libc.so (__start_thread+64) '
+            '(offset 0x1000) (BuildId: 6a0c10d19d5bf39a5a78fa514371dab3)',
+            frame_info.tail)
+        self.assertEqual('libc.so', frame_info.elf_file)
+        self.assertEqual(0x1000, frame_info.offset)
+        self.assertEqual('/fake/fake.apk', frame_info.container_file)
+        self.assertEqual('6a0c10d19d5bf39a5a78fa514371dab3',
+                         frame_info.build_id)
+
+
+# Decorators apply bottom-up, so each test receives the os.path.exists mock
+# first and the get_build_id mock second.
+@patch.object(ndk_stack, 'get_build_id')
+@patch('os.path.exists')
+class VerifyElfFileTests(unittest.TestCase):
+    """Tests of verify_elf_file()."""
+
+    def create_frame_info(self):
+        # Helper: parse a minimal frame line into a FrameInfo for the tests.
+        line = '  #03 pc 00002050  /fake/libfake.so'
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        return frame_info
+
+    def test_elf_file_does_not_exist(self, mock_exists, _):
+        """Verification fails when the ELF file is missing, with or
+        without a readelf available."""
+        mock_exists.return_value = False
+        frame_info = self.create_frame_info()
+        self.assertFalse(
+            frame_info.verify_elf_file(None, '/fake/libfake.so', 'libfake.so'))
+        self.assertFalse(
+            frame_info.verify_elf_file('readelf', '/fake/libfake.so',
+                                       'libfake.so'))
+
+    def test_elf_file_build_id_matches(self, mock_exists, mock_get_build_id):
+        """Matching build ids verify; without readelf no id check happens."""
+        mock_exists.return_value = True
+        frame_info = self.create_frame_info()
+        frame_info.build_id = 'MOCKED_BUILD_ID'
+        # No readelf given: verification passes without reading a build id.
+        self.assertTrue(
+            frame_info.verify_elf_file(None, '/mocked/libfake.so',
+                                       'libfake.so'))
+        mock_get_build_id.assert_not_called()
+
+        # With readelf: the on-disk build id is read and compared.
+        mock_get_build_id.return_value = 'MOCKED_BUILD_ID'
+        self.assertTrue(
+            frame_info.verify_elf_file('readelf', '/mocked/libfake.so',
+                                       'libfake.so'))
+        mock_get_build_id.assert_called_once_with('readelf',
+                                                  '/mocked/libfake.so')
+
+    def test_elf_file_build_id_does_not_match(self, mock_exists,
+                                              mock_get_build_id):
+        """A build id mismatch fails verification and prints a warning."""
+        mock_exists.return_value = True
+        mock_get_build_id.return_value = 'MOCKED_BUILD_ID'
+        frame_info = self.create_frame_info()
+        frame_info.build_id = 'DIFFERENT_BUILD_ID'
+        with patch('sys.stdout', new_callable=StringIO) as mock_stdout:
+            # Without readelf the mismatch cannot be detected.
+            self.assertTrue(
+                frame_info.verify_elf_file(None, '/mocked/libfake.so',
+                                           'none.so'))
+            self.assertFalse(
+                frame_info.verify_elf_file('readelf', '/mocked/libfake.so',
+                                           'display.so'))
+        output = textwrap.dedent("""\
+            WARNING: Mismatched build id for display.so
+            WARNING:   Expected DIFFERENT_BUILD_ID
+            WARNING:   Found    MOCKED_BUILD_ID
+        """)
+        self.assertEqual(output, mock_stdout.getvalue())
+
+
+class GetZipInfoFromOffsetTests(unittest.TestCase):
+    """Tests of get_zip_info_from_offset()."""
+
+    def setUp(self):
+        # A mock ZipFile with a nonexistent filename and, by default, no
+        # entries in its infolist.
+        self.mock_zip = mock.MagicMock()
+        self.mock_zip.filename = '/fake/zip.apk'
+        self.mock_zip.infolist.return_value = []
+
+    def test_file_does_not_exist(self):
+        """Presumably os.stat on the nonexistent zip path raises IOError."""
+        with self.assertRaises(IOError):
+            _ = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1000)
+
+    @patch('os.stat')
+    def test_offset_ge_file_size(self, mock_stat):
+        """Offsets at or past the end of the file yield no entry."""
+        mock_stat.return_value.st_size = 0x1000
+        self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1000))
+        self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1100))
+
+    @patch('os.stat')
+    def test_empty_infolist(self, mock_stat):
+        """A zip with no entries yields no match for any valid offset."""
+        mock_stat.return_value.st_size = 0x1000
+        self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x900))
+
+    @patch('os.stat')
+    def test_zip_info_single_element(self, mock_stat):
+        """With one entry, only offsets within [header, EOF) match it."""
+        mock_stat.return_value.st_size = 0x2000
+
+        mock_zip_info = mock.MagicMock()
+        mock_zip_info.header_offset = 0x100
+        self.mock_zip.infolist.return_value = [mock_zip_info]
+
+        # Before the entry's header: no match.
+        self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x50))
+
+        # At EOF: no match.
+        self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x2000))
+
+        zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x200)
+        self.assertEqual(0x100, zip_info.header_offset)
+
+    @patch('os.stat')
+    def test_zip_info_checks(self, mock_stat):
+        """With two entries, each offset maps to the enclosing entry;
+        boundaries (exact header offsets) belong to that entry."""
+        mock_stat.return_value.st_size = 0x2000
+
+        mock_zip_info1 = mock.MagicMock()
+        mock_zip_info1.header_offset = 0x100
+        mock_zip_info2 = mock.MagicMock()
+        mock_zip_info2.header_offset = 0x1000
+        self.mock_zip.infolist.return_value = [mock_zip_info1, mock_zip_info2]
+
+        self.assertFalse(ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x50))
+
+        zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x200)
+        self.assertEqual(0x100, zip_info.header_offset)
+
+        zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x100)
+        self.assertEqual(0x100, zip_info.header_offset)
+
+        zip_info = ndk_stack.get_zip_info_from_offset(self.mock_zip, 0x1000)
+        self.assertEqual(0x1000, zip_info.header_offset)
+
+
+class GetElfFileTests(unittest.TestCase):
+    """Tests of FrameInfo.get_elf_file().
+
+    Covers the lookup order: the symbol directory first, then (for frames
+    inside an apk) extraction of the embedded ELF into a temp directory.
+    """
+
+    def setUp(self):
+        # Mock zip that "extracts" to a fixed temp path; __enter__ returns
+        # itself so it can be used as a context manager.
+        self.mock_zipfile = mock.MagicMock()
+        self.mock_zipfile.extract.return_value = '/fake_tmp/libtest.so'
+        self.mock_zipfile.__enter__.return_value = self.mock_zipfile
+
+        # Mock temp-dir provider passed through to get_elf_file().
+        self.mock_tmp = mock.MagicMock()
+        self.mock_tmp.get_directory.return_value = '/fake_tmp'
+
+    def create_frame_info(self, tail):
+        # Helper: build a FrameInfo from the given tail and stub out
+        # verify_elf_file so each test controls verification results.
+        line = '  #03 pc 00002050  ' + tail
+        frame_info = ndk_stack.FrameInfo.from_line(line)
+        self.assertTrue(frame_info)
+        frame_info.verify_elf_file = mock.MagicMock()
+        return frame_info
+
+    def test_file_only(self):
+        """Plain .so frame resolves from the symbol dir or not at all."""
+        frame_info = self.create_frame_info('/fake/libfake.so')
+        frame_info.verify_elf_file.return_value = True
+        self.assertEqual(
+            '/fake_dir/symbols/libfake.so',
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        frame_info.verify_elf_file.reset_mock()
+        frame_info.verify_elf_file.return_value = False
+        self.assertFalse(
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/libfake.so', frame_info.tail)
+
+    def test_container_set_elf_in_symbol_dir(self):
+        """apk!lib frame whose lib is present in the symbol dir."""
+        frame_info = self.create_frame_info('/fake/fake.apk!libtest.so')
+        frame_info.verify_elf_file.return_value = True
+        self.assertEqual(
+            '/fake_dir/symbols/libtest.so',
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+    def test_container_set_elf_not_in_symbol_dir_apk_does_not_exist(self):
+        """Falls through to opening the apk, which does not exist on disk."""
+        frame_info = self.create_frame_info('/fake/fake.apk!libtest.so')
+        frame_info.verify_elf_file.return_value = False
+        with self.assertRaises(IOError):
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp)
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+    @patch.object(ndk_stack, 'get_zip_info_from_offset')
+    @patch('zipfile.ZipFile')
+    def test_container_set_elf_not_in_apk(self, _,
+                                          mock_get_zip_info):
+        """No zip entry at the frame's offset -> no ELF file."""
+        mock_get_zip_info.return_value = None
+        frame_info = self.create_frame_info('/fake/fake.apk!libtest.so')
+        frame_info.verify_elf_file.return_value = False
+        self.assertFalse(
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+    @patch.object(ndk_stack, 'get_zip_info_from_offset')
+    @patch('zipfile.ZipFile')
+    def test_container_set_elf_in_apk(self, mock_zipclass, mock_get_zip_info):
+        """Symbol-dir check fails, extracted apk copy verifies."""
+        mock_zipclass.return_value = self.mock_zipfile
+        mock_get_zip_info.return_value.filename = 'libtest.so'
+
+        frame_info = self.create_frame_info('/fake/fake.apk!libtest.so')
+        # First verify (symbol dir) fails, second (extracted copy) passes.
+        frame_info.verify_elf_file.side_effect = [False, True]
+        self.assertEqual(
+            '/fake_tmp/libtest.so',
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+    @patch.object(ndk_stack, 'get_zip_info_from_offset')
+    @patch('zipfile.ZipFile')
+    def test_container_set_elf_in_apk_verify_fails(self, mock_zipclass,
+                                                   mock_get_zip_info):
+        """Both the symbol-dir copy and the extracted copy fail to verify."""
+        mock_zipclass.return_value = self.mock_zipfile
+        mock_get_zip_info.return_value.filename = 'libtest.so'
+
+        frame_info = self.create_frame_info('/fake/fake.apk!libtest.so')
+        frame_info.verify_elf_file.side_effect = [False, False]
+        self.assertFalse(
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+    def test_in_apk_file_does_not_exist(self):
+        """Bare apk frame: opening the nonexistent apk raises IOError."""
+        frame_info = self.create_frame_info('/fake/fake.apk')
+        frame_info.verify_elf_file.return_value = False
+        with self.assertRaises(IOError):
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp)
+        self.assertEqual('/fake/fake.apk', frame_info.tail)
+
+    @patch.object(ndk_stack, 'get_zip_info_from_offset')
+    @patch('zipfile.ZipFile')
+    def test_in_apk_elf_not_in_apk(self, _, mock_get_zip_info):
+        """Bare apk frame with no zip entry at the offset -> no ELF."""
+        mock_get_zip_info.return_value = None
+        frame_info = self.create_frame_info('/fake/fake.apk')
+        self.assertFalse(
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk', frame_info.tail)
+
+    @patch.object(ndk_stack, 'get_zip_info_from_offset')
+    @patch('zipfile.ZipFile')
+    def test_in_apk_elf_in_symbol_dir(self, mock_zipclass, mock_get_zip_info):
+        """Embedded lib found in symbol dir; tail rewritten to apk!lib."""
+        mock_zipclass.return_value = self.mock_zipfile
+        mock_get_zip_info.return_value.filename = 'libtest.so'
+
+        frame_info = self.create_frame_info('/fake/fake.apk')
+        frame_info.verify_elf_file.return_value = True
+        self.assertEqual(
+            '/fake_dir/symbols/libtest.so',
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+    @patch.object(ndk_stack, 'get_zip_info_from_offset')
+    @patch('zipfile.ZipFile')
+    def test_in_apk_elf_in_apk(self, mock_zipclass, mock_get_zip_info):
+        """Embedded lib extracted from the apk; tail rewritten."""
+        mock_zipclass.return_value = self.mock_zipfile
+        mock_get_zip_info.return_value.filename = 'libtest.so'
+
+        frame_info = self.create_frame_info('/fake/fake.apk')
+        frame_info.verify_elf_file.side_effect = [False, True]
+        self.assertEqual(
+            '/fake_tmp/libtest.so',
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+    @patch.object(ndk_stack, 'get_zip_info_from_offset')
+    @patch('zipfile.ZipFile')
+    def test_in_apk_elf_in_apk_verify_fails(self, mock_zipclass,
+                                            mock_get_zip_info):
+        """Neither symbol-dir nor extracted copy verifies; tail still
+        rewritten to the apk!lib form."""
+        mock_zipclass.return_value = self.mock_zipfile
+        mock_get_zip_info.return_value.filename = 'libtest.so'
+
+        frame_info = self.create_frame_info('/fake/fake.apk')
+        frame_info.verify_elf_file.side_effect = [False, False]
+        self.assertFalse(
+            frame_info.get_elf_file('/fake_dir/symbols', None, self.mock_tmp))
+        self.assertEqual('/fake/fake.apk!libtest.so', frame_info.tail)
+
+
+# Allow running this test file directly as a script.
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/ndk-stack/pylintrc b/tests/ndk-stack/pylintrc
new file mode 120000
index 0000000..e242284
--- /dev/null
+++ b/tests/ndk-stack/pylintrc
@@ -0,0 +1 @@
+../../pylintrc
\ No newline at end of file
diff --git a/tests/pytest/__init__.py b/tests/pytest/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/tests/pytest/__init__.py
+++ /dev/null
diff --git a/tests/pytest/ndkstack/files/expected_hwasan.txt b/tests/pytest/ndkstack/files/expected_hwasan.txt
deleted file mode 100644
index a355d42..0000000
--- a/tests/pytest/ndkstack/files/expected_hwasan.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-********** Crash dump: **********
-Build fingerprint: 'Android/aosp_taimen/taimen:R/QT/eng.cferri.20191008.133647:userdebug/test-keys'
-Abort message: '==21586==ERROR: HWAddressSanitizer: tag-mismatch on address 0x0042a0807af0 at pc 0x007b23b8786c
-#0 0x7b138 /data/app/com.example.hellohwasan-MBZF010sEpmy2cZikx49pQ==/lib/arm64/libc64.so
-android_set_abort_message
-bionic/libc/bionic/android_set_abort_message.cpp:88:3
-#1 0x198ccc /apex/com.android.art/lib64/libart.so
-#0 0x7b15c /data/app/com.example.hellohwasan-MBZF010sEpmy2cZikx49pQ==/lib/arm64/libc64.so
-android_set_abort_message
-bionic/libc/bionic/android_set_abort_message.cpp:92:37
-#00 0x000000000007f3ec /system/lib64/bootstrap/libc64.so (abort+172) (BuildId: d0080326fbef53980488aedd32b02f5d)
-                                                          abort
-                                                          bionic/libc/bionic/abort.cpp:50:3
diff --git a/tests/pytest/ndkstack/files/expected_invalid_unicode_log.txt b/tests/pytest/ndkstack/files/expected_invalid_unicode_log.txt
deleted file mode 100644
index e69de29..0000000
--- a/tests/pytest/ndkstack/files/expected_invalid_unicode_log.txt
+++ /dev/null
diff --git a/tests/pytest/ndkstack/files/hwasan.txt b/tests/pytest/ndkstack/files/hwasan.txt
deleted file mode 100644
index 8962841..0000000
--- a/tests/pytest/ndkstack/files/hwasan.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-*** *** *** *** *** *** *** *** *** *** *** *** *** *** *** ***
-Build fingerprint: 'Android/aosp_taimen/taimen:R/QT/eng.cferri.20191008.133647:userdebug/test-keys'
-Revision: 'rev_a'
-ABI: 'arm64'
-Timestamp: 2019-10-10 21:15:11+0000
-pid: 21586, tid: 21586, name: ple.hellohwasan  >>> com.example.hellohwasan <<<
-uid: 0
-signal 6 (SIGABRT), code -1 (SI_QUEUE), fault addr --------
-Abort message: '==21586==ERROR: HWAddressSanitizer: tag-mismatch on address 0x0042a0807af0 at pc 0x007b23b8786c
-WRITE of size 1 at 0x0042a0807af0 tags: db/19 (ptr/mem) in thread T0
-    #0 0x7b23b87868  (/data/app/com.example.hellohwasan-MBZF010sEpmy2cZikx49pQ==/lib/arm64/libc64.so+0x7b138)
-    #1 0x7b8f1e4ccc  (/apex/com.android.art/lib64/libart.so+0x198ccc)
-
-[0x0042a0807ae0,0x0042a0807b00) is a small allocated heap chunk; size: 32 offset: 16
-0x0042a0807af0 is located 0 bytes to the right of 16-byte region [0x0042a0807ae0,0x0042a0807af0)
-allocated here:
-    #0 0x7b23b87868  (/data/app/com.example.hellohwasan-MBZF010sEpmy2cZikx49pQ==/lib/arm64/libc64.so+0x7b15c)
-
-Thread: T325 0x00720040e000 stack: [0x007b7f731000,0x007b7f82acc0) sz: 1023168 tls: [0x000000000000,0x000000000000)
-Thread: T324 0x00720040a000 stack: [0x007b80841000,0x007b8093acc0) sz: 1023168 tls: [0x000000000000,0x000000000000)
-Memory tags around the buggy address (one tag corresponds to 16 bytes):
-   52  52  57  57  c2  08  27  2b  90  99  08  66  eb  eb  87  87   
-   6f  6f  2f  2f  3f  3f  8d  8d  97  97  1d  1d  8d  8d  f5  f5   
-
-backtrace:
-      #00 pc 000000000007f3ec  /system/lib64/bootstrap/libc64.so (abort+172) (BuildId: d0080326fbef53980488aedd32b02f5d)
diff --git a/tests/pytest/ndkstack/files/invalid_unicode_log.txt b/tests/pytest/ndkstack/files/invalid_unicode_log.txt
deleted file mode 100644
index 3f02cbc..0000000
--- a/tests/pytest/ndkstack/files/invalid_unicode_log.txt
+++ /dev/null
@@ -1 +0,0 @@
-01-08 15:14:19.624   559   638 E NativeTombstoneManager: Tombstone has invalid selinux label (u:r:priv_app:s0:c512,c768), ignoring
diff --git a/tests/pytest/ndkstack/test_ndkstack.py b/tests/pytest/ndkstack/test_ndkstack.py
deleted file mode 100755
index 92e50a8..0000000
--- a/tests/pytest/ndkstack/test_ndkstack.py
+++ /dev/null
@@ -1,501 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""Unittests for ndk-stack.py"""
-import textwrap
-import unittest
-from io import StringIO
-from pathlib import Path, PurePosixPath
-from typing import Any
-from unittest import mock
-from unittest.mock import Mock, patch
-
-import pytest
-
-import ndkstack
-
-
-class TestFindLlvmSymbolizer:
-    def test_find_in_prebuilt(self, tmp_path: Path) -> None:
-        ndk_path = tmp_path / "ndk"
-        symbolizer_path = (
-            ndk_path / "toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-symbolizer"
-        )
-        symbolizer_path = symbolizer_path.with_suffix(ndkstack.EXE_SUFFIX)
-        symbolizer_path.parent.mkdir(parents=True)
-        symbolizer_path.touch()
-        assert (
-            ndkstack.find_llvm_symbolizer(ndk_path, ndk_path / "bin", "linux-x86_64")
-            == symbolizer_path
-        )
-
-    def test_find_in_standalone_toolchain(self, tmp_path: Path) -> None:
-        ndk_path = tmp_path / "ndk"
-        symbolizer_path = ndk_path / "bin/llvm-symbolizer"
-        symbolizer_path = symbolizer_path.with_suffix(ndkstack.EXE_SUFFIX)
-        symbolizer_path.parent.mkdir(parents=True)
-        symbolizer_path.touch()
-        assert (
-            ndkstack.find_llvm_symbolizer(ndk_path, ndk_path / "bin", "linux-x86_64")
-            == symbolizer_path
-        )
-
-    def test_not_found(self, tmp_path: Path) -> None:
-        with pytest.raises(OSError, match="Unable to find llvm-symbolizer"):
-            ndkstack.find_llvm_symbolizer(tmp_path, tmp_path / "bin", "linux-x86_64")
-
-
-class TestFindReadelf:
-    def test_find_in_prebuilt(self, tmp_path: Path) -> None:
-        ndk_path = tmp_path / "ndk"
-        readelf_path = (
-            ndk_path / "toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-readelf"
-        )
-        readelf_path = readelf_path.with_suffix(ndkstack.EXE_SUFFIX)
-        readelf_path.parent.mkdir(parents=True)
-        readelf_path.touch()
-        assert (
-            ndkstack.find_readelf(ndk_path, ndk_path / "bin", "linux-x86_64")
-            == readelf_path
-        )
-
-    def test_find_in_standalone_toolchain(self, tmp_path: Path) -> None:
-        ndk_path = tmp_path / "ndk"
-        readelf_path = ndk_path / "bin/llvm-readelf"
-        readelf_path = readelf_path.with_suffix(ndkstack.EXE_SUFFIX)
-        readelf_path.parent.mkdir(parents=True)
-        readelf_path.touch()
-        assert (
-            ndkstack.find_readelf(ndk_path, ndk_path / "bin", "linux-x86_64")
-            == readelf_path
-        )
-
-    def test_not_found(self, tmp_path: Path) -> None:
-        assert ndkstack.find_readelf(tmp_path, tmp_path / "bin", "linux-x86_64") is None
-
-
-class FrameTests(unittest.TestCase):
-    """Test parsing of backtrace lines."""
-
-    def test_line_with_map_name(self) -> None:
-        line = b"  #14 pc 00001000  /fake/libfake.so"
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        self.assertEqual(b"#14", frame_info.num)
-        self.assertEqual(b"00001000", frame_info.pc)
-        self.assertEqual(b"/fake/libfake.so", frame_info.tail)
-        self.assertEqual(PurePosixPath("/fake/libfake.so"), frame_info.elf_file)
-        self.assertFalse(frame_info.offset)
-        self.assertFalse(frame_info.container_file)
-        self.assertFalse(frame_info.build_id)
-
-    def test_line_with_function(self) -> None:
-        line = b"  #08 pc 00001040  /fake/libfake.so (func())"
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        self.assertEqual(b"#08", frame_info.num)
-        self.assertEqual(b"00001040", frame_info.pc)
-        self.assertEqual(b"/fake/libfake.so (func())", frame_info.tail)
-        self.assertEqual(PurePosixPath("/fake/libfake.so"), frame_info.elf_file)
-        self.assertFalse(frame_info.offset)
-        self.assertFalse(frame_info.container_file)
-        self.assertFalse(frame_info.build_id)
-
-    def test_line_with_offset(self) -> None:
-        line = b"  #04 pc 00002050  /fake/libfake.so (offset 0x2000)"
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        self.assertEqual(b"#04", frame_info.num)
-        self.assertEqual(b"00002050", frame_info.pc)
-        self.assertEqual(b"/fake/libfake.so (offset 0x2000)", frame_info.tail)
-        self.assertEqual(PurePosixPath("/fake/libfake.so"), frame_info.elf_file)
-        self.assertEqual(0x2000, frame_info.offset)
-        self.assertFalse(frame_info.container_file)
-        self.assertFalse(frame_info.build_id)
-
-    def test_line_with_build_id(self) -> None:
-        line = b"  #03 pc 00002050  /fake/libfake.so (BuildId: d1d420a58366bf29f1312ec826f16564)"
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        self.assertEqual(b"#03", frame_info.num)
-        self.assertEqual(b"00002050", frame_info.pc)
-        self.assertEqual(
-            b"/fake/libfake.so (BuildId: d1d420a58366bf29f1312ec826f16564)",
-            frame_info.tail,
-        )
-        self.assertEqual(PurePosixPath("/fake/libfake.so"), frame_info.elf_file)
-        self.assertFalse(frame_info.offset)
-        self.assertFalse(frame_info.container_file)
-        self.assertEqual(b"d1d420a58366bf29f1312ec826f16564", frame_info.build_id)
-
-    def test_line_with_container_file(self) -> None:
-        line = b"  #10 pc 00003050  /fake/fake.apk!libc.so"
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        self.assertEqual(b"#10", frame_info.num)
-        self.assertEqual(b"00003050", frame_info.pc)
-        self.assertEqual(b"/fake/fake.apk!libc.so", frame_info.tail)
-        self.assertEqual(PurePosixPath("libc.so"), frame_info.elf_file)
-        self.assertFalse(frame_info.offset)
-        self.assertEqual(PurePosixPath("/fake/fake.apk"), frame_info.container_file)
-        self.assertFalse(frame_info.build_id)
-
-    def test_line_with_container_and_elf_equal(self) -> None:
-        line = b"  #12 pc 00004050  /fake/libc.so!lib/libc.so"
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        self.assertEqual(b"#12", frame_info.num)
-        self.assertEqual(b"00004050", frame_info.pc)
-        self.assertEqual(b"/fake/libc.so!lib/libc.so", frame_info.tail)
-        self.assertEqual(PurePosixPath("/fake/libc.so"), frame_info.elf_file)
-        self.assertFalse(frame_info.offset)
-        self.assertFalse(frame_info.container_file)
-        self.assertFalse(frame_info.build_id)
-
-    def test_line_everything(self) -> None:
-        line = (
-            b"  #07 pc 00823fc  /fake/fake.apk!libc.so (__start_thread+64) "
-            b"(offset 0x1000) (BuildId: 6a0c10d19d5bf39a5a78fa514371dab3)"
-        )
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        self.assertEqual(b"#07", frame_info.num)
-        self.assertEqual(b"00823fc", frame_info.pc)
-        self.assertEqual(
-            b"/fake/fake.apk!libc.so (__start_thread+64) "
-            b"(offset 0x1000) (BuildId: 6a0c10d19d5bf39a5a78fa514371dab3)",
-            frame_info.tail,
-        )
-        self.assertEqual(PurePosixPath("libc.so"), frame_info.elf_file)
-        self.assertEqual(0x1000, frame_info.offset)
-        self.assertEqual(PurePosixPath("/fake/fake.apk"), frame_info.container_file)
-        self.assertEqual(b"6a0c10d19d5bf39a5a78fa514371dab3", frame_info.build_id)
-
-    def test_0x_prefixed_address(self) -> None:
-        """Tests that addresses beginning with 0x are parsed correctly."""
-        frame_info = ndkstack.FrameInfo.from_line(
-            b"  #00  pc 0x000000000006263c  "
-            b"/apex/com.android.runtime/lib/bionic/libc.so (abort+172)"
-        )
-        assert frame_info is not None
-        assert frame_info.pc == b"000000000006263c"
-
-
-@patch.object(ndkstack, "get_build_id")
-@patch("os.path.exists")
-class VerifyElfFileTests(unittest.TestCase):
-    """Tests of verify_elf_file()."""
-
-    def create_frame_info(self) -> ndkstack.FrameInfo:
-        line = b"  #03 pc 00002050  /fake/libfake.so"
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        return frame_info
-
-    def test_elf_file_does_not_exist(self, mock_exists: Mock, _: Mock) -> None:
-        mock_exists.return_value = False
-        frame_info = self.create_frame_info()
-        self.assertFalse(
-            frame_info.verify_elf_file(None, Path("/fake/libfake.so"), "libfake.so")
-        )
-        self.assertFalse(
-            frame_info.verify_elf_file(
-                Path("llvm-readelf"), Path("/fake/libfake.so"), "libfake.so"
-            )
-        )
-
-    def test_elf_file_build_id_matches(
-        self, mock_exists: Mock, mock_get_build_id: Mock
-    ) -> None:
-        mock_exists.return_value = True
-        frame_info = self.create_frame_info()
-        frame_info.build_id = b"MOCKED_BUILD_ID"
-        self.assertTrue(
-            frame_info.verify_elf_file(None, Path("/mocked/libfake.so"), "libfake.so")
-        )
-        mock_get_build_id.assert_not_called()
-
-        mock_get_build_id.return_value = b"MOCKED_BUILD_ID"
-        self.assertTrue(
-            frame_info.verify_elf_file(
-                Path("llvm-readelf"), Path("/mocked/libfake.so"), "libfake.so"
-            )
-        )
-        mock_get_build_id.assert_called_once_with(
-            Path("llvm-readelf"), Path("/mocked/libfake.so")
-        )
-
-    def test_elf_file_build_id_does_not_match(
-        self, mock_exists: Mock, mock_get_build_id: Mock
-    ) -> None:
-        mock_exists.return_value = True
-        mock_get_build_id.return_value = b"MOCKED_BUILD_ID"
-        frame_info = self.create_frame_info()
-        frame_info.build_id = b"DIFFERENT_BUILD_ID"
-        with patch("sys.stdout", new_callable=StringIO) as mock_stdout:
-            self.assertTrue(
-                frame_info.verify_elf_file(None, Path("/mocked/libfake.so"), "none.so")
-            )
-            self.assertFalse(
-                frame_info.verify_elf_file(
-                    Path("llvm-readelf"), Path("/mocked/libfake.so"), "display.so"
-                )
-            )
-        output = textwrap.dedent(
-            """\
-            WARNING: Mismatched build id for display.so
-            WARNING:   Expected DIFFERENT_BUILD_ID
-            WARNING:   Found    MOCKED_BUILD_ID
-        """
-        )
-        self.assertEqual(output, mock_stdout.getvalue())
-
-
-class GetZipInfoFromOffsetTests(unittest.TestCase):
-    """Tests of get_zip_info_from_offset()."""
-
-    def setUp(self) -> None:
-        self.mock_zip = mock.MagicMock()
-        self.mock_zip.filename = "/fake/zip.apk"
-        self.mock_zip.infolist.return_value = []
-
-    def test_file_does_not_exist(self) -> None:
-        with self.assertRaises(IOError):
-            _ = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1000)
-
-    @patch("os.stat")
-    def test_offset_ge_file_size(self, mock_stat: Mock) -> None:
-        mock_stat.return_value.st_size = 0x1000
-        self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1000))
-        self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1100))
-
-    @patch("os.stat")
-    def test_empty_infolist(self, mock_stat: Mock) -> None:
-        mock_stat.return_value.st_size = 0x1000
-        self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x900))
-
-    @patch("os.stat")
-    def test_zip_info_single_element(self, mock_stat: Mock) -> None:
-        mock_stat.return_value.st_size = 0x2000
-
-        mock_zip_info = mock.MagicMock()
-        mock_zip_info.header_offset = 0x100
-        self.mock_zip.infolist.return_value = [mock_zip_info]
-
-        self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x50))
-
-        self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x2000))
-
-        zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x200)
-        assert zip_info is not None
-        self.assertEqual(0x100, zip_info.header_offset)
-
-    @patch("os.stat")
-    def test_zip_info_checks(self, mock_stat: Mock) -> None:
-        mock_stat.return_value.st_size = 0x2000
-
-        mock_zip_info1 = mock.MagicMock()
-        mock_zip_info1.header_offset = 0x100
-        mock_zip_info2 = mock.MagicMock()
-        mock_zip_info2.header_offset = 0x1000
-        self.mock_zip.infolist.return_value = [mock_zip_info1, mock_zip_info2]
-
-        self.assertFalse(ndkstack.get_zip_info_from_offset(self.mock_zip, 0x50))
-
-        zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x200)
-        assert zip_info is not None
-        self.assertEqual(0x100, zip_info.header_offset)
-
-        zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x100)
-        assert zip_info is not None
-        self.assertEqual(0x100, zip_info.header_offset)
-
-        zip_info = ndkstack.get_zip_info_from_offset(self.mock_zip, 0x1000)
-        assert zip_info is not None
-        self.assertEqual(0x1000, zip_info.header_offset)
-
-
-class GetElfFileTests(unittest.TestCase):
-    """Tests of FrameInfo.get_elf_file()."""
-
-    def setUp(self) -> None:
-        self.mock_zipfile = mock.MagicMock()
-        self.mock_zipfile.extract.return_value = "/fake_tmp/libtest.so"
-        self.mock_zipfile.__enter__.return_value = self.mock_zipfile
-
-        self.mock_tmp = mock.MagicMock()
-        self.mock_tmp.get_directory.return_value = "/fake_tmp"
-
-    # TODO: Refactor so this can specify a real return type.
-    # We can't specify anything more accurate than `Any` here because the real return
-    # value is a FrameInfo that's had its verify_elf_file method monkey patched with a
-    # mock.
-    def create_frame_info(self, tail: bytes) -> Any:
-        line = b"  #03 pc 00002050  " + tail
-        frame_info = ndkstack.FrameInfo.from_line(line)
-        assert frame_info is not None
-        # mypy can't (and won't) tolerate this.
-        # https://github.com/python/mypy/issues/2427
-        frame_info.verify_elf_file = mock.Mock()  # type: ignore
-        return frame_info
-
-    def test_file_only(self) -> None:
-        frame_info = self.create_frame_info(b"/fake/libfake.so")
-        frame_info.verify_elf_file.return_value = True
-        self.assertEqual(
-            Path("/fake_dir/symbols/libfake.so"),
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp),
-        )
-        frame_info.verify_elf_file.reset_mock()
-        frame_info.verify_elf_file.return_value = False
-        self.assertFalse(
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp)
-        )
-        self.assertEqual(b"/fake/libfake.so", frame_info.tail)
-
-    def test_container_set_elf_in_symbol_dir(self) -> None:
-        frame_info = self.create_frame_info(b"/fake/fake.apk!libtest.so")
-        frame_info.verify_elf_file.return_value = True
-        self.assertEqual(
-            Path("/fake_dir/symbols/libtest.so"),
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp),
-        )
-        self.assertEqual(b"/fake/fake.apk!libtest.so", frame_info.tail)
-
-    def test_container_set_elf_not_in_symbol_dir_apk_does_not_exist(self) -> None:
-        frame_info = self.create_frame_info(b"/fake/fake.apk!libtest.so")
-        frame_info.verify_elf_file.return_value = False
-        with self.assertRaises(IOError):
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp)
-        self.assertEqual(b"/fake/fake.apk!libtest.so", frame_info.tail)
-
-    @patch.object(ndkstack, "get_zip_info_from_offset")
-    @patch("zipfile.ZipFile")
-    def test_container_set_elf_not_in_apk(
-        self, _: Mock, mock_get_zip_info: Mock
-    ) -> None:
-        mock_get_zip_info.return_value = None
-        frame_info = self.create_frame_info(
-            b"/fake/fake.apk!libtest.so (offset 0x2000)"
-        )
-        frame_info.verify_elf_file.return_value = False
-        self.assertFalse(
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp)
-        )
-        self.assertEqual(b"/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
-
-    @patch.object(ndkstack, "get_zip_info_from_offset")
-    @patch("zipfile.ZipFile")
-    def test_container_set_elf_in_apk(
-        self, mock_zipclass: Mock, mock_get_zip_info: Mock
-    ) -> None:
-        mock_zipclass.return_value = self.mock_zipfile
-        mock_get_zip_info.return_value.filename = "libtest.so"
-
-        frame_info = self.create_frame_info(
-            b"/fake/fake.apk!libtest.so (offset 0x2000)"
-        )
-        frame_info.verify_elf_file.side_effect = [False, True]
-        self.assertEqual(
-            Path("/fake_tmp/libtest.so"),
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp),
-        )
-        self.assertEqual(b"/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
-
-    @patch.object(ndkstack, "get_zip_info_from_offset")
-    @patch("zipfile.ZipFile")
-    def test_container_set_elf_in_apk_verify_fails(
-        self, mock_zipclass: Mock, mock_get_zip_info: Mock
-    ) -> None:
-        mock_zipclass.return_value = self.mock_zipfile
-        mock_get_zip_info.return_value.filename = "libtest.so"
-
-        frame_info = self.create_frame_info(
-            b"/fake/fake.apk!libtest.so (offset 0x2000)"
-        )
-        frame_info.verify_elf_file.side_effect = [False, False]
-        self.assertFalse(
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp)
-        )
-        self.assertEqual(b"/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
-
-    def test_in_apk_file_does_not_exist(self) -> None:
-        frame_info = self.create_frame_info(b"/fake/fake.apk")
-        frame_info.verify_elf_file.return_value = False
-        with self.assertRaises(IOError):
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp)
-        self.assertEqual(b"/fake/fake.apk", frame_info.tail)
-
-    @patch.object(ndkstack, "get_zip_info_from_offset")
-    @patch("zipfile.ZipFile")
-    def test_in_apk_elf_not_in_apk(self, _: Mock, mock_get_zip_info: Mock) -> None:
-        mock_get_zip_info.return_value = None
-        frame_info = self.create_frame_info(b"/fake/fake.apk (offset 0x2000)")
-        self.assertFalse(
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp)
-        )
-        self.assertEqual(b"/fake/fake.apk (offset 0x2000)", frame_info.tail)
-
-    @patch.object(ndkstack, "get_zip_info_from_offset")
-    @patch("zipfile.ZipFile")
-    def test_in_apk_elf_in_symbol_dir(
-        self, mock_zipclass: Mock, mock_get_zip_info: Mock
-    ) -> None:
-        mock_zipclass.return_value = self.mock_zipfile
-        mock_get_zip_info.return_value.filename = "libtest.so"
-
-        frame_info = self.create_frame_info(b"/fake/fake.apk (offset 0x2000)")
-        frame_info.verify_elf_file.return_value = True
-        self.assertEqual(
-            Path("/fake_dir/symbols/libtest.so"),
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp),
-        )
-        self.assertEqual(b"/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
-
-    @patch.object(ndkstack, "get_zip_info_from_offset")
-    @patch("zipfile.ZipFile")
-    def test_in_apk_elf_in_apk(
-        self, mock_zipclass: Mock, mock_get_zip_info: Mock
-    ) -> None:
-        mock_zipclass.return_value = self.mock_zipfile
-        mock_get_zip_info.return_value.filename = "libtest.so"
-
-        frame_info = self.create_frame_info(b"/fake/fake.apk (offset 0x2000)")
-        frame_info.verify_elf_file.side_effect = [False, True]
-        self.assertEqual(
-            Path("/fake_tmp/libtest.so"),
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp),
-        )
-        self.assertEqual(b"/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
-
-    @patch.object(ndkstack, "get_zip_info_from_offset")
-    @patch("zipfile.ZipFile")
-    def test_in_apk_elf_in_apk_verify_fails(
-        self, mock_zipclass: Mock, mock_get_zip_info: Mock
-    ) -> None:
-        mock_zipclass.return_value = self.mock_zipfile
-        mock_get_zip_info.return_value.filename = "libtest.so"
-
-        frame_info = self.create_frame_info(b"/fake/fake.apk (offset 0x2000)")
-        frame_info.verify_elf_file.side_effect = [False, False]
-        self.assertFalse(
-            frame_info.get_elf_file(Path("/fake_dir/symbols"), None, self.mock_tmp)
-        )
-        self.assertEqual(b"/fake/fake.apk!libtest.so (offset 0x2000)", frame_info.tail)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/tests/pytest/ndkstack/test_systemtest.py b/tests/pytest/ndkstack/test_systemtest.py
deleted file mode 100755
index 08f835d..0000000
--- a/tests/pytest/ndkstack/test_systemtest.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-"""System tests for ndk-stack.py"""
-
-import os.path
-import subprocess
-import unittest
-
-import ndk.ext.subprocess
-import ndk.paths
-import ndk.toolchains
-from ndk.hosts import Host
-
-
-class SystemTests(unittest.TestCase):
-    """Complete system test of ndk-stack.py script."""
-
-    def system_test(self, backtrace_file: str, expected_file: str) -> None:
-        ndk_path = ndk.paths.get_install_path()
-        self.assertTrue(
-            ndk_path.exists(),
-            f"{ndk_path} does not exist. Build the NDK before running this test.",
-        )
-
-        ndk_stack = ndk_path / "ndk-stack"
-        if Host.current() is Host.Windows64:
-            ndk_stack = ndk_stack.with_suffix(".bat")
-
-        symbol_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "files")
-        proc = subprocess.run(
-            [
-                ndk_stack,
-                "-s",
-                symbol_dir,
-                "-i",
-                os.path.join(symbol_dir, backtrace_file),
-            ],
-            check=True,
-            capture_output=True,
-        )
-
-        # Read the expected output.
-        file_name = os.path.join(symbol_dir, expected_file)
-        with open(file_name, "rb") as exp_file:
-            expected = exp_file.read()
-        expected = expected.replace(b"SYMBOL_DIR", symbol_dir.encode("utf-8"))
-        self.maxDiff = None
-        self.assertEqual(expected.decode("utf-8"), proc.stdout.decode("utf-8"))
-
-    def test_all_stacks(self) -> None:
-        self.system_test("backtrace.txt", "expected.txt")
-
-    def test_multiple_crashes(self) -> None:
-        self.system_test("multiple.txt", "expected_multiple.txt")
-
-    def test_hwasan(self) -> None:
-        self.system_test("hwasan.txt", "expected_hwasan.txt")
-
-    def test_invalid_unicode(self) -> None:
-        with ndk.ext.subprocess.verbose_subprocess_errors():
-            self.system_test(
-                "invalid_unicode_log.txt", "expected_invalid_unicode_log.txt"
-            )
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/wrap.sh/asan.sh b/wrap.sh/asan.sh
index 3929540..5d7d2b3 100644
--- a/wrap.sh/asan.sh
+++ b/wrap.sh/asan.sh
@@ -1,30 +1,11 @@
 #!/system/bin/sh
-HERE=$(cd "$(dirname "$0")" && pwd)
-
-cmd=$1
-shift
-
-# This must be called *before* `LD_PRELOAD` is set. Otherwise, if this is a 32-
-# bit app running on a 64-bit device, the 64-bit getprop will fail to load
-# because it will preload a 32-bit ASan runtime.
-# https://github.com/android/ndk/issues/1744
-os_version=$(getprop ro.build.version.sdk)
-
-if [ "$os_version" -eq "27" ]; then
-  cmd="$cmd -Xrunjdwp:transport=dt_android_adb,suspend=n,server=y -Xcompiler-option --debuggable $@"
-elif [ "$os_version" -eq "28" ]; then
-  cmd="$cmd -XjdwpProvider:adbconnection -XjdwpOptions:suspend=n,server=y -Xcompiler-option --debuggable $@"
-else
-  cmd="$cmd -XjdwpProvider:adbconnection -XjdwpOptions:suspend=n,server=y $@"
-fi
-
+HERE="$(cd "$(dirname "$0")" && pwd)"
 export ASAN_OPTIONS=log_to_syslog=false,allow_user_segv_handler=1
-ASAN_LIB=$(ls "$HERE"/libclang_rt.asan-*-android.so)
+ASAN_LIB=$(ls $HERE/libclang_rt.asan-*-android.so)
 if [ -f "$HERE/libc++_shared.so" ]; then
     # Workaround for https://github.com/android-ndk/ndk/issues/988.
     export LD_PRELOAD="$ASAN_LIB $HERE/libc++_shared.so"
 else
     export LD_PRELOAD="$ASAN_LIB"
 fi
-
-exec $cmd
+"$@"
diff --git a/wrap.sh/hwasan.sh b/wrap.sh/hwasan.sh
deleted file mode 100644
index 88445fc..0000000
--- a/wrap.sh/hwasan.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/system/bin/sh
-LD_HWASAN=1 exec "$@"