| #!/usr/bin/env python3 |
| # |
| # Copyright (C) 2022 The Android Open Source Project |
| # |
| # Licensed under the Apache License, Version 2.0 (the "License"); |
| # you may not use this file except in compliance with the License. |
| # You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| |
| import argparse |
| from datetime import datetime |
| import inspect |
| import os |
| import shutil |
| import sys |
| |
| from paths import DIST_PATH_DEFAULT, OUT_PATH_TRACE, TOOLCHAIN_CC_MODULE_LIST_PATH, TOOLCHAIN_JAVA_MODULE_LIST_PATH, TOOLCHAIN_RUST_MODULE_LIST_PATH |
| from soong_trace import TraceInfo, load_target_prefixes, process_trace |
| from test_compiler import prepare_prebuilts |
| from utils import TEST_VERSION_NUMBER, ExtantPath, run_build_target |
| |
def parse_args() -> argparse.Namespace:
    """Parse and validate the benchmark's command-line arguments.

    Returns:
        argparse.Namespace with ``outdir``, ``label``, ``iterations``, and
        ``toolchains`` attributes.

    Exits via ``sys.exit`` with an error message when the iteration count is
    out of the accepted [1, 50] range.
    """
    parser = argparse.ArgumentParser(description=inspect.getdoc(sys.modules[__name__]))

    parser.add_argument("--outdir", "-o", type=ExtantPath, default=DIST_PATH_DEFAULT,
                        help="Where the directory containing the benchmark results will be created")
    parser.add_argument("--label", "-l", type=str,
                        default=f"rust-benchmark-{datetime.now().strftime('%Y-%m-%dT%H%M%S')}",
                        help="Name to give the directory created to store results from this benchmark run")
    parser.add_argument("--iterations", "-i", type=int, default=5,
                        help="Number of times to test each compiler")

    parser.add_argument("toolchains", nargs="+", type=ExtantPath)

    args = parser.parse_args()

    # Require at least one iteration: a count of 0 would later divide by zero
    # when main() averages the per-iteration durations.
    if args.iterations < 1:
        sys.exit("Number of iterations must be at least 1")
    elif args.iterations > 50:
        sys.exit("Number of iterations is too large; must be 50 or less")

    return args
| |
def main() -> None:
    """Benchmark each supplied Rust toolchain.

    For every toolchain archive given on the command line: install it as the
    prebuilt compiler, run ``args.iterations`` clean Rust builds, save each
    build trace, and append one summary row (tag, prebuilt size, average and
    per-iteration Rust durations) to ``summary.csv`` in the results directory.
    """
    args = parse_args()

    # Disable remote build execution so that timings reflect the local
    # compiler being benchmarked.
    env = os.environ.copy()
    env["USE_RBE"] = "false"

    benchmark_dir = args.outdir / args.label
    if benchmark_dir.exists():
        sys.exit(f"Benchmark directory already exists: {benchmark_dir}")
    benchmark_dir.mkdir()

    target_prefixes_cc = load_target_prefixes(TOOLCHAIN_CC_MODULE_LIST_PATH)
    target_prefixes_java = load_target_prefixes(TOOLCHAIN_JAVA_MODULE_LIST_PATH)
    target_prefixes_rust = load_target_prefixes(TOOLCHAIN_RUST_MODULE_LIST_PATH)

    with open(benchmark_dir / "summary.csv", "w") as summary_csv:
        summary_csv.write(
            "Tag,Size (bytes),Average Duration (ms)," +
            ",".join([f"Iteration {i}" for i in range(args.iterations)]) + "\n")

        for toolchain in args.toolchains:
            # Drop up to two trailing extensions (e.g. ".tar.xz") to form a
            # short per-toolchain label and results-directory name.
            file_root_name = toolchain.name.rsplit('.', 2)[0]
            toolchain_records_dir = benchmark_dir / file_root_name

            if toolchain_records_dir.exists():
                sys.exit(f"Toolchain results directory already exists: {toolchain_records_dir}")
            toolchain_records_dir.mkdir()

            prebuilt_path = prepare_prebuilts(toolchain)

            print(f"Prebuilt path: {prebuilt_path}")

            # Total size of the installed prebuilts, counting regular files
            # only — directory entries would inflate the reported size.
            prebuilt_size_bytes = sum(
                f.stat().st_size for f in prebuilt_path.rglob("*") if f.is_file())

            traces: list[TraceInfo] = []
            for i in range(args.iterations):
                print(f"Benchmarking {toolchain} : iteration {i}")
                run_build_target("clean", prebuilt_version=TEST_VERSION_NUMBER, env=env)
                run_build_target("rust", prebuilt_version=TEST_VERSION_NUMBER, env=env)
                # Preserve the raw trace for later inspection, then parse it.
                traces.append(
                    process_trace(
                        shutil.move(OUT_PATH_TRACE, toolchain_records_dir / f"build.trace.{i}.gz"),
                        target_prefixes_cc, target_prefixes_java, target_prefixes_rust))

            rust_times = [t.duration_rust_μs for t in traces]
            record_elements = [
                file_root_name,
                prebuilt_size_bytes,
                int(sum(rust_times) / args.iterations),
            ] + rust_times
            summary_csv.write(",".join(str(el) for el in record_elements) + "\n")

    print("\nDone!")
| |
| |
# Script entry point: run the benchmark only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()