#!/usr/bin/env python3
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates a tarball suitable for use as a Rust prebuilt for Android."""
import argparse
from datetime import datetime
import os
from pathlib import Path
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.request

import audit
import build_platform
import cargo
import config
import source_manager
from paths import *
from utils import (
TERM_GREEN,
TERM_RED,
TERM_YELLOW,
ExtantPath,
ResolvedPath,
archive_create,
archive_extract,
compress_file,
export_profiles,
get_prebuilt_binary_paths,
is_archive,
print_colored,
run_and_exit_on_failure,
run_quiet,
strip_symbols)
from upstream_tests import disabled_tests
#
# Constants
#
STDLIB_SOURCES = [
"Cargo.lock",
"library",
"vendor/backtrace",
"vendor/cfg-if",
"vendor/compiler_builtins",
"vendor/getopts",
"vendor/hashbrown",
"vendor/libc",
"vendor/rustc-demangle",
"vendor/unicode-width",
]
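# Build-system files to archive for debugging when the LLVM build fails
# (see the failure path in main()).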
LLVM_BUILD_PATHS_OF_INTEREST: list[str] = [
"build.ninja",
"cmake",
"CMakeCache.txt",
"CMakeFiles",
"cmake_install.cmake",
"compile_commands.json",
"CPackConfig.cmake",
"CPackSourceConfig.cmake",
"install_manifest.txt",
"llvm.spec"
]
#
# Program logic
#
def parse_args(argv: list[str] | None) -> argparse.Namespace:
"""Parses arguments and returns the parsed structure."""
    parser = argparse.ArgumentParser(description="Build the Rust Toolchain")
parser.add_argument(
"--build-name", "-b", default="dev",
help="Release name for the dist result")
parser.add_argument(
"--dist", "-d", dest="dist_path", type=ResolvedPath, default=DIST_PATH_DEFAULT,
help="Where to place distributable artifacts")
parser.add_argument(
"--copy-and-patch", action=argparse.BooleanOptionalAction, default=True,
help="Whether to copy and patch the source, or reuse from a prior run")
parser.add_argument(
"--repatch", action="store_true",
help="Don't copy the whole source. Just copy and repatch the files affected by the patches.")
parser.add_argument(
"--patch-abort", action=argparse.BooleanOptionalAction, default=True,
help="Abort on patch failure")
    parser.add_argument(
        "--stage", "-s", type=int, choices=[1, 2, 3],
        help="Target Rust bootstrap stage")
parser.add_argument(
"--config-only", action="store_true",
help="Setup up the source code and configure it and then exit")
parser.add_argument(
"--cargo-audit", action=argparse.BooleanOptionalAction, default=True,
help="To run `cargo vet` and `cargo deny` or not")
parser.add_argument(
"--verbose", action=argparse.BooleanOptionalAction, default=True,
help="Verbose")
ndk_group = parser.add_mutually_exclusive_group()
ndk_group.add_argument(
"--ndk", type=ExtantPath, dest="ndk_path",
help="Path to location of the NDK to build against")
ndk_group.add_argument(
"--ndk-search-path", type=ExtantPath,
help="Where to search for an NDK archive (android-ndk-*-linux-x86_64)")
pgo_group = parser.add_mutually_exclusive_group()
pgo_group.add_argument(
"--profile-generate", type=ResolvedPath, nargs="?", const=OUT_PATH_PROFILES,
help="Instrument the compiler and store profiles in the specified directory")
pgo_group.add_argument(
"--profile-use", type=ExtantPath, nargs="?", const=OUT_PATH_PROFILES,
help="Use the rust.profdata and llvm.profdata files in the provided "
"directory to optimize the compiler")
parser.add_argument(
"--cs-profile-generate", type=ResolvedPath, nargs="?", const=OUT_PATH_PROFILES,
help="Instrument the LLVM libraries to generate context-sensitive profiles")
parser.add_argument(
"--lto", "-l", default="none", choices=["none", "thin"],
help="Type of LTO to perform. Valid LTO types: none, thin, full")
parser.add_argument(
"--emit-relocs", action="store_true",
help="Emit relocation information")
parser.add_argument(
"--gc-sections", action=argparse.BooleanOptionalAction, default=True,
help="Garbage collect sections during linking")
parser.add_argument(
"--cgu1", action=argparse.BooleanOptionalAction, default=True,
help="Set `-C codegen-units=1` when building the toolchain")
parser.add_argument(
"--llvm-linkage", default="shared", choices=["static", "shared"],
help="Specify if LLVM should be built as a static or shared library")
parser.add_argument(
"--host", default=build_platform.triple(),
help="Override the autodetected host architecture")
parser.add_argument(
"--bare-targets", action=argparse.BooleanOptionalAction, default=True,
help="Don't build libraries for bare targets")
parser.add_argument(
"--device-targets", action=argparse.BooleanOptionalAction, default=True,
help="Don't build libraries for devices")
parser.add_argument(
"--host-multilibs", action=argparse.BooleanOptionalAction, default=True,
help="Don't build libraries for alternate host triples")
parser.add_argument(
"--host-only", action=argparse.BooleanOptionalAction, default=False,
help="Implies no-bare-targets, no-device-targets, and no-host-multilibs")
parser.add_argument(
"--upstream-test", action="store_true",
help="Run upstream tests as part of the build process")
parser.add_argument(
"--upstream-test-only", action="store_true",
help="Run upstream tests and exit without building a full toolchain")
args = parser.parse_args(argv)
if build_platform.is_darwin():
        if args.profile_generate is not None or args.profile_use is not None:
sys.exit("PGO is not supported on the Darwin platform")
if args.host != build_platform.triple():
sys.exit("Cross compiling toolchains is not supported on the Darwin platform")
    if args.cs_profile_generate is not None and args.llvm_linkage == "static" and args.lto == "none":
sys.exit("Context-sensitive PGO with LLVM static linkage requires LTO to be enabled")
if args.ndk_search_path is not None and not args.ndk_search_path.is_dir():
sys.exit(f"NDK search path is not a directory: {args.ndk_search_path}")
if args.host_only:
        args.bare_targets = False
        args.device_targets = False
        args.host_multilibs = False
return args
# Matches both release archive names and CI build names, e.g.:
# * android-ndk-r26-linux.zip
# * android-ndk-10792818-linux-x86_64.zip
PATTERN_NDK_ARCHIVE = re.compile(r"android-ndk(?:-\w+)?-linux(?:-x86_64)?\.zip")
def initialize_ndk(args: argparse.Namespace) -> int:
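    """Resolves the NDK to build against, storing the result in args.ndk_path.

    Resolution order: an archive found under --ndk-search-path, an explicit
    --ndk path (directory or archive), the ANDROID_NDK_PATH environment
    variable, then a freshly downloaded release archive.

    Returns 0 on success and 1 on failure.
    """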
if args.ndk_search_path is not None:
for path in args.ndk_search_path.iterdir():
            if path.is_file() and PATTERN_NDK_ARCHIVE.fullmatch(path.name):
args.ndk_path = extract_ndk_archive(path)
return 0
print(f"Unable to locate NDK archive in search path {args.ndk_search_path}")
return 1
elif args.ndk_path is None:
if env_ndk_str := os.environ.get("ANDROID_NDK_PATH"):
args.ndk_path = Path(env_ndk_str)
if not args.ndk_path.exists():
sys.exit(f"ANDROID_NDK_PATH {args.ndk_path} does not exist")
else:
args.ndk_path = DOWNLOADS_PATH / NDK_DIRECTORY_NAME
if not args.ndk_path.exists():
if not DOWNLOADS_PATH.exists():
DOWNLOADS_PATH.mkdir()
ndk_archive_path = DOWNLOADS_PATH / NDK_RELEASE_ARCHIVE
if not ndk_archive_path.exists():
print(f"Downloading NDK archive: {NDK_DOWNLOAD_URL}")
urllib.request.urlretrieve(NDK_DOWNLOAD_URL, ndk_archive_path)
print("Decompressing NDK archive")
archive_extract(ndk_archive_path, DOWNLOADS_PATH)
# Ensure that there is an empty Android.mk file to prevent Soong
# from recursing into the NDK directory.
(args.ndk_path / "Android.mk").touch()
return 0
elif is_archive(args.ndk_path):
args.ndk_path = extract_ndk_archive(args.ndk_path)
return 0
elif args.ndk_path.is_dir():
return 0
else:
print(f"Unrecognized file format: {args.ndk_path.name}")
return 1
def extract_ndk_archive(archive_path: Path) -> Path:
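    """Extracts an NDK archive into a fresh temp directory and returns the NDK root."""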
print(f"Unzipping NKD archive {str(archive_path)}")
# Before exiting the script will test to see if args.ndk_path is
# relative to the system temp directory and, if it is, will delete it.
extract_dir = Path(tempfile.mkdtemp(prefix="android_ndk-"))
archive_extract(archive_path, extract_dir)
archive_top_level = list(extract_dir.iterdir())
if len(archive_top_level) != 1 or not archive_top_level[0].name.startswith("android-ndk-"):
raise RuntimeError("Unrecognized NDK archive layout")
return extract_dir / archive_top_level[0].name
def regenerate_lockfile(project_path: Path, env: dict[str, str]) -> None:
"""Offline fetch to regenerate lockfiles"""
run_and_exit_on_failure(
[CARGO_PATH, "fetch", "--offline"],
f"Failed to rebuilt {project_path.as_posix()}/Cargo.lock via cargo-fetch operation",
cwd=project_path, env=env)
def main(argv: list[str] | None = None) -> int:
"""Runs the configure-build-fixup-dist pipeline."""
args = parse_args(argv)
args.dist_path.mkdir(exist_ok=True)
with open(args.dist_path / BUILD_COMMAND_RECORD_NAME, "w") as f:
f.write(" ".join(argv or sys.argv))
# Add some output padding to make the messages easier to read
print()
# Initialize the return code
retcode = 0
#
# Initialize directories, links, and prebuilts
#
OUT_PATH.mkdir(exist_ok=True)
OUT_PATH_WRAPPERS.mkdir(exist_ok=True)
if OUT_PATH_PACKAGE.exists():
shutil.rmtree(OUT_PATH_PACKAGE)
OUT_PATH_PACKAGE.mkdir()
if not build_platform.is_windows():
# Set up links to host tools. This is a temporary workaround and will be
# removed when there are no more host tools required to build the
# toolchain.
OUT_PATH_BIN_LINKS.mkdir(exist_ok=True, parents=True)
PERL_LINK_PATH.unlink(missing_ok=True)
SH_LINK_PATH.unlink(missing_ok=True)
UNZIP_LINK_PATH.unlink(missing_ok=True)
STRIP_LINK_PATH.unlink(missing_ok=True)
which_perl = shutil.which("perl")
if which_perl is not None:
os.symlink(which_perl, PERL_LINK_PATH)
else:
raise RuntimeError("Unable to find Perl executable")
which_sh = shutil.which("sh")
if which_sh is not None:
os.symlink(which_sh, SH_LINK_PATH)
else:
raise RuntimeError("Unable to find sh executable")
os.symlink(ZIPTOOL_PATH, UNZIP_LINK_PATH)
os.symlink(OBJCOPY_PATH, STRIP_LINK_PATH)
if build_platform.is_darwin():
# To avoid using system libc++.dylib on Darwin hosts we need to copy
# the prebuilt version into the build directory's lib dir. This is
# necessary because buildbot security policies do not allow for
# modifying the DYLD_LIBRARY_PATH environment variable.
OUT_PATH_LLVM_LIB_DIR.mkdir(parents=True, exist_ok=True)
shutil.copy2(LLVM_CXX_RUNTIME_PATH_HOST / "libc++.dylib", OUT_PATH_LLVM_LIB_DIR)
retcode = initialize_ndk(args)
if retcode != 0:
return retcode
#
# Setup source files
#
if args.copy_and_patch:
source_manager.setup_files(
RUST_SOURCE_PATH, OUT_PATH_RUST_SOURCE, PATCHES_PATH,
patch_abort=args.patch_abort, repatch=args.repatch)
#
# Configure Rust
#
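    # Configure against a copy of the environment so changes made during
    # configuration don't leak into os.environ.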
env = dict(os.environ)
config.configure(args, env)
# Flush stdout to ensure correct output ordering in the logs
sys.stdout.flush()
if args.config_only:
return 0
#
# Build
#
    # Run bootstrap once to trigger vendoring
    #
    # The call's result is not checked because it is *expected* to fail - there
    # isn't a user-facing way to directly trigger the bootstrap, so we give it
    # a no-op to perform that will require it to write out the cargo config.
run_quiet([PYTHON_PATH, OUT_PATH_RUST_SOURCE / "x.py", "--help"], cwd=OUT_PATH_RUST_SOURCE, env=env)
# Because some patches may have touched vendored source we will rebuild
# specific Cargo.lock files.
regenerate_lockfile(OUT_PATH_RUST_SOURCE, env)
# TODO: Required by patch #40
regenerate_lockfile(OUT_PATH_RUST_BOOTSTRAP_SOURCE, env)
regenerate_lockfile(OUT_PATH_RUST_CARGO_SOURCE, env)
# We only need to perform stage 3 of the bootstrap process when we are
# collecting profile data.
if args.stage:
bootstrap_stage = str(args.stage)
elif args.profile_generate or args.cs_profile_generate:
bootstrap_stage = "3"
else:
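        # A regular (non-PGO) toolchain build only needs stage 2.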
bootstrap_stage = "2"
# Build the compiler
if not args.upstream_test_only:
result = subprocess.run(
[PYTHON_PATH, OUT_PATH_RUST_SOURCE / "x.py", "--stage", bootstrap_stage, "install"],
cwd=OUT_PATH_RUST_SOURCE, env=env)
if result.returncode != 0:
print(f"Build stage failed with error {result.returncode}")
if LLVM_BUILD_PATH.exists():
tarball_datetime = datetime.now().isoformat(timespec="seconds")
tarball_path = args.dist_path / f"llvm-build-config-{tarball_datetime}.tar"
                with tarfile.open(tarball_path.as_posix(), mode="x") as tar_ref:
                    for path in LLVM_BUILD_PATHS_OF_INTEREST:
                        # Add each entry relative to the LLVM build dir via
                        # arcname instead of chdir, so the cwd is preserved
                        # even if add() raises.
                        tar_ref.add((LLVM_BUILD_PATH / path).as_posix(), arcname=path)
compress_file(tarball_path)
return result.returncode
# Run the tests
if args.upstream_test or args.upstream_test_only:
# TODO: Put the test deny list in a separate file
result = subprocess.run(
[PYTHON_PATH, OUT_PATH_RUST_SOURCE / "x.py",
"--stage", bootstrap_stage,
"--target", args.host,
"test",
"--no-doc",
        ] + disabled_tests,
cwd=OUT_PATH_RUST_SOURCE, env=env)
if result.returncode != 0:
print(f"Test stage failed with error {result.returncode}")
return result.returncode
if args.upstream_test_only:
return 0
# Install sources
if build_platform.is_linux():
shutil.rmtree(OUT_PATH_STDLIB_SRCS, ignore_errors=True)
shutil.rmtree(OUT_PATH_STDLIB_SRCS_LEGACY.parent, ignore_errors=True)
OUT_PATH_STDLIB_SRCS.mkdir(parents=True)
for stdlib in STDLIB_SOURCES:
src_path = OUT_PATH_RUST_SOURCE.joinpath(stdlib)
dst_path = OUT_PATH_STDLIB_SRCS.joinpath(stdlib)
if src_path.is_file():
shutil.copy(src_path, dst_path)
else:
shutil.copytree(src_path, dst_path)
# TODO(b/271885049) Remove after Soong build doesn't use it anymore
os.makedirs(OUT_PATH_STDLIB_SRCS_LEGACY.parent)
os.symlink(OUT_PATH_STDLIB_SRCS_LEGACY_RELATIVE,
OUT_PATH_STDLIB_SRCS_LEGACY, target_is_directory=True)
#
# Symbol fixup
#
# The Rust build doesn't have an option to auto-strip binaries so we do
# it here. We only strip symbols from executables and .so objects.
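    # When --emit-relocs is set, strip only debug info so the emitted
    # relocation data stays usable.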
flag = "--strip-debug" if args.emit_relocs else "--strip-unneeded"
for obj_path in get_prebuilt_binary_paths(OUT_PATH_PACKAGE):
if strip_symbols(obj_path, flag) != 0:
print(f"Failed to strip symbols from {obj_path}")
retcode = 1
#
# File fixup
#
copy_libs = []
# Install the libc++ library to out/package/lib64/
if build_platform.is_darwin():
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / "libc++.dylib")
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / "libc++abi.dylib")
elif args.host == "x86_64-unknown-linux-musl":
copy_libs.append(LLVM_CXX_RUNTIME_PATH_LINUX_MUSL / "libc++.so")
copy_libs.append(MUSL_SYSROOT64_PATH / "lib" / "libc_musl.so")
elif not build_platform.is_windows():
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / "libc++.so")
# Install libLLVM+deps to out/package/lib64
if build_platform.is_darwin():
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / f"libLLVM-{CLANG_VERSION}.dylib")
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / "libedit.0.dylib")
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / "libxml2.2.dylib")
else:
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / f"libLLVM-{CLANG_VERSION}.so")
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / "libedit.so.0")
copy_libs.append(LLVM_CXX_RUNTIME_PATH_HOST / "libxml2.so.2")
if copy_libs:
lib64_path = OUT_PATH_PACKAGE / "lib64"
lib64_path.mkdir(exist_ok=True)
for lib in copy_libs:
shutil.copy2(lib, lib64_path / os.path.basename(lib))
# Some stdlib crates might include Android.mk or Android.bp files.
# If they do, filter them out.
if build_platform.is_linux():
        # pathlib's glob() has no brace expansion, so match each suffix separately.
        for pattern in ("**/Android.mk", "**/Android.bp"):
            for file_path in OUT_PATH_STDLIB_SRCS.glob(pattern):
                file_path.unlink()
# Copy in the Bazel build definitions
if args.host == "x86_64-unknown-linux-gnu":
shutil.copy(TOOLCHAIN_BAZEL_PATH, OUT_PATH_PACKAGE)
#
# Build cargo-deny and cargo-vet
#
if args.host == build_platform.triple():
cargo_deny = cargo.Crate(
WORKSPACE_PATH / "toolchain" / "cargo-deny",
env,
cargo_path=NEW_CARGO_PATH,
target=args.host,
linker=config.get_wrapper_paths(args.host)[2])
cargo_deny.cargo_install(OUT_PATH_PACKAGE)
cargo_vet = cargo.Crate(
WORKSPACE_PATH / "toolchain" / "cargo-vet",
env,
cargo_path=NEW_CARGO_PATH,
target=args.host,
linker=config.get_wrapper_paths(args.host)[2])
cargo_vet.cargo_install(OUT_PATH_PACKAGE)
# Audit ourselves
if args.cargo_audit:
cargo_deny.cargo_deny()
cargo_deny.cargo_vet()
cargo_vet.cargo_deny()
cargo_vet.cargo_vet()
# TODO(jamesfarrell): Run "cargo test".
# cargo-vet needs https://github.com/mozilla/cargo-vet/pull/521
# cargo-deny has a test that accesses the network that needs to go on a denylist.
#
# Dist
#
print("Creating artifacts")
export_profiles(args.profile_generate or args.cs_profile_generate, args.dist_path)
if args.profile_use and args.profile_use != args.dist_path:
for p in args.profile_use.glob("*.profdata"):
shutil.copy(p, args.dist_path)
archive_create(args.dist_path / f"rust-{args.build_name}", OUT_PATH_PACKAGE)
#
# Perform artifact audit
#
if build_platform.is_linux():
shared_libs_allow_list = audit.get_allow_list()
shared_libs_actual = audit.get_required_libs(OUT_PATH_PACKAGE)
print("\n")
if shared_libs_actual != shared_libs_allow_list:
print_colored("Warning - Shared library requirements changed", TERM_YELLOW)
new_reqs = [lib for lib in shared_libs_actual if lib not in shared_libs_allow_list]
old_reqs = [lib for lib in shared_libs_allow_list if lib not in shared_libs_actual]
if new_reqs:
print_colored("New required libraries:", TERM_RED)
for lib_path in new_reqs:
print_colored(f"\t{lib_path}", TERM_RED)
retcode = 1
if old_reqs:
print_colored("Stale list entries:", TERM_YELLOW)
for lib_path in old_reqs:
print_colored(f"\t{lib_path}", TERM_YELLOW)
else:
print_colored("No new shared libraries detected", TERM_GREEN)
print("")
#
# Cleanup
#
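    # extract_ndk_archive() unpacks into the system temp dir; remove that copy
    # now that the build is done.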
if args.ndk_path.is_relative_to(tempfile.gettempdir()):
shutil.rmtree(args.ndk_path.as_posix())
return retcode
if __name__ == "__main__":
sys.exit(main())