| #!/usr/bin/env python3 |
| # |
| # Copyright (C) 2019 The Android Open Source Project |
| # |
| # Licensed under the Apache License, Version 2.0 (the "License"); |
| # you may not use this file except in compliance with the License. |
| # You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| |
| """Creates a tarball suitable for use as a Rust prebuilt for Android.""" |
| |
| import argparse |
| from datetime import datetime |
| import os |
| from pathlib import Path |
| import re |
| import shutil |
| import subprocess |
| import sys |
| import tarfile |
| import tempfile |
| import urllib.request |
| |
| import boltifyer |
| import context |
| |
| import audit |
| |
| from android_rust import build_platform, cargo, config, source_manager |
| from android_rust.paths import * |
| from android_rust.toolchains import CLANG_TOOLCHAIN_HOST, LLVM_VERSION_DEFAULT, ClangToolchain, RustToolchain, export_profiles |
| from android_rust.utils import ( |
| TERM_GREEN, |
| TERM_RED, |
| TERM_YELLOW, |
| ScriptException, |
| ExtantPath, |
| ResolvedPath, |
| archive_create, |
| archive_extract, |
| compress_file, |
| get_prebuilt_binary_paths, |
| is_archive, |
| is_elf_file, |
| print_colored, |
| run_and_exit_on_failure, |
| run_quiet, |
| set_symlink, |
| ) |
| from android_rust.upstream_tests import disabled_tests, skipped_tests |
| |
| import llvm_android.configs |
| import llvm_android.builders |
| |
| # |
| # Types |
| # |
| |
| |
| class CrateRedistributionInfo: |
| |
| def __init__(self, version: str, genfiles: list[tuple[str, str]] = []): |
| self.version = version |
| self.genfiles = genfiles.copy() |
| |
| |
| CRI = CrateRedistributionInfo |
| |
| # |
| # Constants |
| # |
| |
# Top-level entries of the Rust source tree that are redistributed as part of
# the standard library sources (see package_toolchain()).
STDLIB_TOP_LEVEL_SOURCES = [
    "Cargo.lock",
    "library",
]

# Vendored crates redistributed alongside the stdlib sources, keyed by crate
# name.  Each entry records the expected vendored version and any
# build-generated files to copy as well.  audit_vendored_crates() fails the
# build when a listed version is missing from the source tree.
VENDORED_CRATE_REDISTRIBUTION_INFO = {
    "backtrace": CRI("0.3.73"),
    "cfg-if": CRI("1.0.0"),
    "compiler_builtins": CRI("0.1.133", [("aarch64-unknown-none", "outlined_atomics.rs")]),
    "getopts": CRI("0.2.21"),
    "hashbrown": CRI("0.15.0"),
    "libc": CRI("0.2.161"),
    "rustc-demangle": CRI("0.1.24"),
    "unicode-width": CRI("0.1.14"),
} # yapf: disable

# Files from a failed LLVM build tree that are archived into the dist
# directory to aid debugging (see build_toolchain()).
LLVM_BUILD_PATHS_OF_INTEREST: list[str] = [
    "build.ninja",
    "cmake",
    "CMakeCache.txt",
    "CMakeFiles",
    "cmake_install.cmake",
    "compile_commands.json",
    "CPackConfig.cmake",
    "CPackSourceConfig.cmake",
    "install_manifest.txt",
    "llvm.spec"
]
| |
| # |
| # Program logic |
| # |
| |
| |
def parse_args(argv: list[str] | None) -> argparse.Namespace:
    """Parses and validates arguments and returns the parsed structure.

    Args:
        argv: Argument list to parse, or None to use sys.argv.

    Returns:
        The parsed namespace, with several fields post-processed (e.g.
        --host-only implies disabling bare/device/multilib targets, and BOLT
        options force emit_relocs on).
    """
    parser = argparse.ArgumentParser("Build the Rust Toolchain")
    parser.add_argument(
        "--build-name",
        "-b",
        default=build_platform.system(),
        help="Release name for the dist result")
    parser.add_argument(
        "--bolt-name", help="Name for the archive containing the BOLTed version of the toolchain")
    parser.add_argument(
        "--dist",
        "-d",
        dest="dist_path",
        type=ResolvedPath,
        default=(os.environ.get("DIST_DIR") or DIST_PATH_DEFAULT),
        help="Where to place distributable artifacts")
    parser.add_argument(
        "--copy-and-patch",
        action=argparse.BooleanOptionalAction,
        default=True,
        help="Whether to copy and patch the source, or reuse from a prior run")
    parser.add_argument(
        "--repatch",
        action="store_true",
        help="Don't copy the whole source. Just copy and repatch the files affected by the patches.")
    # Typo fix in help text: "boostrap" -> "bootstrap".
    parser.add_argument(
        "--stage", "-s", type=str, choices=["1", "2", "3"], help="Target Rust bootstrap stage")
    parser.add_argument(
        "--cargo-audit",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="To run `cargo vet` and `cargo deny` or not")
    parser.add_argument(
        "--verbose", action=argparse.BooleanOptionalAction, default=True, help="Verbose")

    build_staging_group = parser.add_mutually_exclusive_group()
    # Typo fix in help text: "Setup up" -> "Set up".
    build_staging_group.add_argument(
        "--config-only",
        action="store_true",
        help="Set up the source code and configure it and then exit")
    build_staging_group.add_argument(
        "--package-only",
        action="store_true",
        help="Assume the build has completed successfully and only perform packaging steps")

    ndk_group = parser.add_mutually_exclusive_group()
    ndk_group.add_argument(
        "--ndk",
        type=ExtantPath,
        dest="ndk_path",
        help="Path to location of the NDK to build against")
    ndk_group.add_argument(
        "--ndk-search-path",
        type=ExtantPath,
        help="Where to search for an NDK archive (android-ndk-*-linux-x86_64)")

    pgo_group = parser.add_mutually_exclusive_group()
    pgo_group.add_argument(
        "--profile-generate",
        type=ResolvedPath,
        nargs="?",
        const=OUT_PATH_PROFILES,
        help="Instrument the compiler and store profiles in the specified directory")
    pgo_group.add_argument(
        "--profile-use",
        type=ExtantPath,
        nargs="?",
        const=OUT_PATH_PROFILES,
        help="Use the rust.profdata and llvm.profdata files in the provided "
        "directory to optimize the compiler")

    parser.add_argument(
        "--cs-profile-generate",
        type=ResolvedPath,
        nargs="?",
        const=OUT_PATH_PROFILES,
        help="Instrument the LLVM libraries to generate context-sensitive profiles")

    parser.add_argument(
        "--bolt-opt",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="Produce an additional BOLT optimized copy of the toolchain")

    bolt_group = parser.add_mutually_exclusive_group()
    bolt_group.add_argument(
        "--bolt-profile-generate",
        type=ExtantPath,
        nargs="?",
        const=OUT_PATH_PROFILES,
        help="Add BOLT instrumentation to the toolchain")
    bolt_group.add_argument("--bolt-profile-use", type=ExtantPath, help="Path to BOLT profiles")

    parser.add_argument(
        "--lto", "-l", default="none", choices=["none", "thin"], help="Type of LTO to perform")
    parser.add_argument("--emit-relocs", action="store_true", help="Emit relocation information")
    parser.add_argument(
        "--cgu1",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="Set `-C codegen-units=1` when building the toolchain")
    parser.add_argument(
        "--llvm-linkage",
        default="shared",
        choices=["static", "shared"],
        help="Specify if LLVM should be built as a static or shared library")
    parser.add_argument(
        "--host",
        default=build_platform.triple(),
        help="Override the auto-detected host architecture")

    parser.add_argument(
        "--bare-targets",
        action=argparse.BooleanOptionalAction,
        default=True,
        help="Don't build libraries for bare targets")
    parser.add_argument(
        "--device-targets",
        action=argparse.BooleanOptionalAction,
        default=True,
        help="Don't build libraries for devices")
    parser.add_argument(
        "--host-multilibs",
        action=argparse.BooleanOptionalAction,
        default=True,
        help="Don't build libraries for alternate host triples")
    parser.add_argument(
        "--host-only",
        action=argparse.BooleanOptionalAction,
        default=False,
        help="Implies no-bare-targets, no-device-targets, and no-host-multilibs")

    goal_group = parser.add_mutually_exclusive_group()
    goal_group.add_argument("--llvm-only", action="store_true", help="Build LLVM then stop")
    goal_group.add_argument(
        "--upstream-test",
        action="store_true",
        help="Run upstream tests as part of the build process")
    goal_group.add_argument(
        "--upstream-test-only",
        action="store_true",
        help="Run upstream tests and exit without building a full toolchain")

    prebuilts_group = parser.add_argument_group()
    prebuilts_group.add_argument(
        "--rust-prebuilt",
        type=ExtantPath,
        default=RUST_HOST_STAGE0_PATH,
        help="Path to Rust toolchain to bootstrap this build")
    prebuilts_group.add_argument(
        "--clang-prebuilt",
        type=ExtantPath,
        help="Path to archive or directory containing the Clang executables to use when building the Rust toolchain")
    prebuilts_group.add_argument(
        "--llvm-prebuilt",
        type=ExtantPath,
        help="Path to archive or directory containing the LLVM libraries and executables to use when building the Rust toolchain")
    prebuilts_group.add_argument(
        "--unified-prebuilt",
        type=ExtantPath,
        help="Path to archive or directory containing the unified set of Clang/LLVM libraries and executables to use when building the Rust toolchain"
    )

    prebuilts_group.add_argument(
        "--llvm-prebuilt-has-rust-patches",
        action="store_true",
        help="Tell Rust that the LLVM prebuilt was built with Rust patches")
    prebuilts_group.add_argument(
        "--llvm-version",
        default=LLVM_VERSION_DEFAULT,
        help="Major version number for the LLVM prebuilt")
    prebuilts_group.add_argument(
        "--rust-stage0-triple",
        default=RUST_STAGE0_TRIPLE_DEFAULT,
        help="Host triple for the provided Rust stage0 prebuilt")

    zstd_group = parser.add_argument_group()
    zstd_group.add_argument(
        "--zstd-library", type=ExtantPath, default=None, help="Path to libzstd.a")
    zstd_group.add_argument(
        "--zstd-include", type=ExtantPath, default=None, help="Path to zstd include directory.")

    args = parser.parse_args(argv)

    if build_platform.is_darwin():
        if args.profile_generate is not None or args.profile_use is not None:
            sys.exit("PGO is not supported on the Darwin platform")

        if args.host != build_platform.triple():
            sys.exit("Cross compiling toolchains is not supported on the Darwin platform")

    # Bug fix: `--lto` defaults to the string "none" and can never be None, so
    # the previous `args.lto is None` test made this check dead code.  Compare
    # against the "none" choice instead.
    if args.cs_profile_generate is not None and args.llvm_linkage == "static" and args.lto == "none":
        sys.exit("Context-sensitive PGO with LLVM static linkage requires LTO to be enabled")

    if args.ndk_search_path is not None and not args.ndk_search_path.is_dir():
        sys.exit(f"NDK search path is not a directory: {args.ndk_search_path}")

    # Bug fix: the previous expression (`args.llvm_only is None == args.llvm_prebuilt
    # is None`) was a chained comparison that could never be true because
    # --llvm-only is a store_true flag (never None), so this validation never
    # fired.  Per the message, the two flags conflict: --llvm-only builds LLVM
    # from source while --llvm-prebuilt supplies one.  (Assumed intent - confirm.)
    if args.llvm_only and args.llvm_prebuilt is not None:
        sys.exit("Invalid argument combination: --llvm-only and --llvm-prebuilt")

    if args.unified_prebuilt is not None:
        if args.clang_prebuilt or args.llvm_prebuilt:
            sys.exit(
                "If the `--unified-prebuilts` flag is set Clang and LLVM prebuilts may not be specified independently")
    elif args.clang_prebuilt is None:
        args.clang_prebuilt = LLVM_PREBUILT_PATH_HOST

    # Bug fix: parenthesized to express the intended XOR.  The previous chained
    # comparison was always false, silently accepting mismatched zstd flags.
    if (args.zstd_library is None) != (args.zstd_include is None):
        sys.exit("The --zstd-library and --zstd-include flags must be used together")

    elif args.zstd_library is not None and (args.unified_prebuilt or args.llvm_prebuilt):
        sys.exit("Zstd prebuilts are only used/necessary when building the in-tree version of LLVM")

    if args.host_only:
        args.bare_targets = False
        args.device_targets = False
        args.host_multilibs = False

    if args.bolt_name is None:
        args.bolt_name = args.build_name + "-bolt"

    # Any BOLT profiling mode implies producing the BOLT-optimized copy, which
    # in turn requires relocations to be preserved in the binaries.
    if args.bolt_profile_use is not None or args.bolt_profile_generate is not None:
        args.bolt_opt = True

    if args.bolt_opt:
        args.emit_relocs = True

    return args
| |
| |
def audit_toolchain_package_dir(args: argparse.Namespace) -> None:
    """Audits the packaged toolchain's shared-library requirements (Linux only).

    Compares the libraries actually required by the package against the allow
    list; warns on stale entries and raises on new requirements.
    """
    if args.config_only or args.package_only:
        return
    if not build_platform.is_linux():
        return

    allowed_libs = audit.get_allow_list()
    required_libs = audit.get_required_libs(OUT_PATH_PACKAGE)

    print("\n")
    if required_libs == allowed_libs:
        print_colored("No new shared libraries detected", TERM_GREEN)
    else:
        print_colored("Warning - Shared library requirements changed", TERM_YELLOW)

        added = [lib for lib in required_libs if lib not in allowed_libs]
        stale = [lib for lib in allowed_libs if lib not in required_libs]

        if stale:
            print_colored("Warning - stale shared library allow list entries:", TERM_YELLOW)
            for entry in stale:
                print_colored(f"\t{entry}", TERM_YELLOW)

        if added:
            print_colored("Error - new shared library requirements:", TERM_RED)
            for entry in added:
                print_colored(f"\t{entry}", TERM_RED)
            raise ScriptException("The generated toolchain failed the shared library audit")

    print("")
| |
| |
def audit_vendored_crates(args: argparse.Namespace) -> None:
    """Verifies that every redistributed vendored crate exists at the expected
    version under the Rust source tree's vendor/ directory."""
    if args.package_only:
        return

    # Audit the Rust toolchain source's vendored crates

    missing_crate_versions = [
        (name, info.version)
        for name, info in VENDORED_CRATE_REDISTRIBUTION_INFO.items()
        if not (RUST_SOURCE_PATH / versioned_vendor_crate_path(name)).exists()
    ]

    if missing_crate_versions:
        details = "\n".join(
            f"\t{name}: {version}" for (name, version) in missing_crate_versions)
        raise ScriptException(
            "Rust toolchain vendored crate versions need to be updated:\n" + details)
| |
| |
def boltify_toolchain(args: argparse.Namespace) -> None:
    """Produces the BOLT-processed copy of the toolchain when --bolt-opt is set."""
    if args.config_only or args.package_only:
        return
    if not args.bolt_opt:
        return

    boltifyer.boltify_toolchain(
        OUT_PATH_PACKAGE,
        args.dist_path,
        args.bolt_name,
        strip_only=False,
        profile_generate=args.bolt_profile_generate,
        profile_use=args.bolt_profile_use)
| |
| |
def build_extra_tools(args: argparse.Namespace, env: dict[str, str]) -> None:
    """Builds cargo-deny and cargo-vet with the freshly built toolchain.

    Runs only when --cargo-audit is enabled, a full toolchain was built, the
    build is not cross-compiling, and the host is not Darwin.
    """
    if args.config_only or args.package_only:
        return

    #
    # Build cargo-deny and cargo-vet
    #

    if (args.cargo_audit and not args.llvm_only and args.host == build_platform.triple() and
            not build_platform.is_darwin()):
        new_toolchain = RustToolchain(OUT_PATH_PACKAGE, host=build_platform.system())
        # Values shared by both crates; index 2 of the wrapper-path tuple is
        # the linker wrapper (matches the original per-crate calls).
        cargo_path = new_toolchain.cargo()
        linker = config.get_wrapper_paths(args.host)[2]

        # DRY fix: the two tools were previously built with two copy-pasted,
        # otherwise-identical cargo.Crate blocks.
        for tool_name in ("cargo-deny", "cargo-vet"):
            crate = cargo.Crate(
                WORKSPACE_PATH / "toolchain" / tool_name,
                env,
                cargo_path=cargo_path,
                target=args.host,
                linker=linker)
            crate.cargo_install(OUT_PATH_PACKAGE)

    # TODO(jamesfarrell): Run "cargo test".
    # cargo-vet needs https://github.com/mozilla/cargo-vet/pull/521
    # cargo-deny has a test that accesses the network that needs to go on a denylist.
| |
| |
def build_toolchain(args: argparse.Namespace, env: dict[str, str]) -> None:
    """Runs the Rust bootstrap build (or the LLVM-only build).

    On an LLVM-only build the resulting bin/include/lib trees are copied into
    the package directory.  On a failed full build the LLVM build configuration
    is archived into the dist directory to aid debugging.

    Raises:
        ScriptException: If the build (either mode) fails.
    """
    if args.config_only or args.package_only:
        return

    #
    # Build
    #

    # Trigger bootstrap to trigger vendoring
    #
    # Call is not checked because this is *expected* to fail - there isn't a
    # user facing way to directly trigger the bootstrap, so we give it a
    # no-op to perform that will require it to write out the cargo config.
    run_quiet([BIN_PATH_PYTHON, OUT_PATH_RUST_SOURCE / "x.py", "--help"],
              cwd=OUT_PATH_RUST_SOURCE,
              env=env)

    # Because some patches may have touched vendored source we will rebuild
    # specific Cargo.lock files.
    regenerate_lockfile(args.rust_prebuilt, OUT_PATH_RUST_SOURCE / "library", env)

    # We only need to perform stage 3 of the bootstrap process when we are
    # collecting profile data.
    if not args.stage:
        if args.profile_generate or args.cs_profile_generate:
            args.stage = "3"
        else:
            args.stage = "2"

    # Build the compiler

    if not args.upstream_test_only:
        if args.llvm_only:
            result = subprocess.run(
                [BIN_PATH_PYTHON, OUT_PATH_RUST_SOURCE / "x.py", "build", "llvm"],
                cwd=OUT_PATH_RUST_SOURCE,
                env=env)

            if result.returncode == 0:
                for subdir in ["bin", "include", "lib"]:
                    shutil.copytree(
                        OUT_PATH_RUST_SOURCE / "build" / "host" / "llvm" / subdir,
                        OUT_PATH_PACKAGE / subdir,
                        dirs_exist_ok=True)

                # The CMake package files are not redistributed.
                shutil.rmtree(OUT_PATH_PACKAGE / "lib" / "cmake")

            else:
                raise ScriptException("Failed to build LLVM")

        else:
            result = subprocess.run(
                [
                    BIN_PATH_PYTHON,
                    OUT_PATH_RUST_SOURCE / "x.py",
                    "--stage",
                    args.stage,
                    "install"
                ],
                cwd=OUT_PATH_RUST_SOURCE,
                env=env)  # yapf: disable

            if result.returncode != 0:
                # Capture the LLVM build configuration for post-mortem
                # debugging before reporting the failure.
                llvm_build_path = OUT_PATH_RUST_SOURCE / "build" / args.rust_stage0_triple / "llvm" / "build"
                if llvm_build_path.exists():
                    tarball_datetime = datetime.now().isoformat(timespec="seconds")
                    tarball_path = args.dist_path / f"llvm-build-config-{tarball_datetime}.tar"

                    # Bug fix: the previous code os.chdir()'d into the build
                    # tree and back with no try/finally, so an exception from
                    # add() left the process in the wrong directory.  arcname=
                    # stores the same relative member names without changing
                    # the cwd.
                    with tarfile.open(tarball_path, mode="x") as tar_ref:
                        for llvm_path in LLVM_BUILD_PATHS_OF_INTEREST:
                            tar_ref.add(llvm_build_path / llvm_path, arcname=llvm_path)

                    compress_file(tarball_path)

                raise ScriptException(f"Build stage failed with error {result.returncode}")
| |
| |
def initialize_build(args: argparse.Namespace) -> dict[str, str]:
    """Prepares directories, host-tool links, prebuilts, and sources.

    Returns:
        The environment dictionary to use for subsequent build steps.
    """
    #
    # Initialize directories, links, and prebuilts
    #

    OUT_PATH.mkdir(exist_ok=True)
    OUT_PATH_WRAPPERS.mkdir(exist_ok=True)

    if not args.package_only:
        # Start from a clean package directory.
        if OUT_PATH_PACKAGE.exists():
            shutil.rmtree(OUT_PATH_PACKAGE)
        OUT_PATH_PACKAGE.mkdir()

    args.dist_path.mkdir(exist_ok=True)

    if not build_platform.is_windows():
        # Set up links to host tools. This is a temporary workaround and will be
        # removed when there are no more host tools required to build the
        # toolchain.
        OUT_PATH_BIN_LINKS.mkdir(exist_ok=True, parents=True)

        host_tools = (
            ("perl", LINK_PATH_PERL, "Perl executable"),
            ("sh", LINK_PATH_SH, "`sh` executable"),
            ("strip", LINK_PATH_STRIP, "`strip` executable"),
        )
        for tool_name, link_path, description in host_tools:
            tool_location = shutil.which(tool_name)
            if tool_location is None:
                raise RuntimeError(f"Unable to find {description}")
            set_symlink(Path(tool_location), link_path)

        set_symlink(BIN_PATH_ZIPTOOL, LINK_PATH_UNZIP)

    initialize_ndk(args)
    initialize_rust_prebuilt(args)
    initialize_llvm_and_clang_prebuilts(args)
    initialize_zstd(args)

    #
    # Setup source files
    #

    if not args.package_only:
        if args.copy_and_patch:
            source_manager.setup_files(
                RUST_SOURCE_PATH, OUT_PATH_RUST_SOURCE, PATCHES_PATH, repatch=args.repatch)

        if build_platform.is_darwin():
            # To avoid using system libc++.dylib on Darwin hosts we need to copy
            # the prebuilt version into the build directory's lib dir. This is
            # necessary because buildbot security policies do not allow for
            # modifying the DYLD_LIBRARY_PATH environment variable.
            OUT_PATH_LLVM_LIB_DIR.mkdir(parents=True, exist_ok=True)
            shutil.copy2(CLANG_TOOLCHAIN_HOST.lib() / "libc++.dylib", OUT_PATH_LLVM_LIB_DIR)

    #
    # Configure Rust
    #

    env = dict(os.environ)
    config.configure(args, env)

    # Install one final symlink now that the compiler wrappers have been
    # generated.
    if build_platform.is_linux():
        set_symlink(OUT_PATH_WRAPPERS / f"linker-{build_platform.triple()}", LINK_PATH_CC)

    # Flush stdout to ensure correct output ordering in the logs
    sys.stdout.flush()

    return env
| |
| |
def initialize_ndk(args: argparse.Namespace) -> None:
    """Locates (or downloads) an NDK and stores its path in args.ndk_path.

    Resolution order: --ndk-search-path, then --ndk / the ANDROID_NDK_PATH
    environment variable, and finally a cached download under DOWNLOADS_PATH.
    """
    if args.ndk_search_path is not None:
        # Use the first file in the search directory whose name matches the
        # release/CI naming patterns (see PATTERN_NDK_ARCHIVE).
        for path in args.ndk_search_path.iterdir():
            if path.is_file() and PATTERN_NDK_ARCHIVE.match(path.name):
                args.ndk_path = extract_ndk_archive(path)
                return

        raise ScriptException(f"Unable to locate NDK archive in search path {args.ndk_search_path}")

    elif args.ndk_path is None:
        if env_ndk_str := os.environ.get("ANDROID_NDK_PATH"):
            # NOTE(review): a path from ANDROID_NDK_PATH is used as-is; unlike
            # --ndk it is never run through the is_archive()/extract branch
            # below - confirm this is intended.
            args.ndk_path = Path(env_ndk_str)

            if not args.ndk_path.exists():
                raise ScriptException(f"ANDROID_NDK_PATH {args.ndk_path} does not exist")
        else:
            # Fall back to a cached copy, downloading the release archive on
            # first use.
            args.ndk_path = DOWNLOADS_PATH / NDK_DIRECTORY_NAME
            if not args.ndk_path.exists():
                if not DOWNLOADS_PATH.exists():
                    DOWNLOADS_PATH.mkdir()
                ndk_archive_path = DOWNLOADS_PATH / NDK_RELEASE_ARCHIVE
                if not ndk_archive_path.exists():
                    print(f"Downloading NDK archive: {NDK_DOWNLOAD_URL}")
                    urllib.request.urlretrieve(NDK_DOWNLOAD_URL, ndk_archive_path)

                print("Decompressing NDK archive")
                archive_extract(ndk_archive_path, DOWNLOADS_PATH)

            # Ensure that there is an empty Android.mk file to prevent Soong
            # from recursing into the NDK directory.
            (args.ndk_path / "Android.mk").touch()

    elif is_archive(args.ndk_path):
        # --ndk pointed at an archive; unpack it into a temp directory (cleaned
        # up later by remove_temporary_directories()).
        args.ndk_path = extract_ndk_archive(args.ndk_path)

    elif not args.ndk_path.is_dir():
        raise ScriptException(f"Unrecognized file format: {args.ndk_path.name}")
| |
| |
def initialize_rust_prebuilt(args: argparse.Namespace) -> None:
    """Unpacks (if archived) and wraps the Rust stage0 prebuilt as a
    RustToolchain stored back into args.rust_prebuilt."""
    if args.rust_prebuilt is None:
        return

    if is_archive(args.rust_prebuilt):
        unpack_dir = Path(tempfile.mkdtemp(prefix="rust-prebuilt-"))
        print(f"Extracting Rust prebuilt archive to {unpack_dir}")
        archive_extract(args.rust_prebuilt, unpack_dir)
        args.rust_prebuilt = unpack_dir

    args.rust_prebuilt = RustToolchain(args.rust_prebuilt, host=build_platform.system())
| |
| |
def initialize_llvm_and_clang_prebuilts(args: argparse.Namespace) -> None:
    """Extracts (if archived) and wraps the Clang/LLVM prebuilts.

    After this call args.clang_prebuilt and args.llvm_prebuilt hold
    ClangToolchain objects (or None).  With --unified-prebuilt both fields
    refer to the same toolchain object.
    """
    if args.unified_prebuilt is not None:
        if is_archive(args.unified_prebuilt):
            extract_dir = Path(tempfile.mkdtemp(prefix="unified-prebuilt-"))
            print(f"Extracting unified Clang/LLVM archive to {extract_dir}")
            # strip_components=1 drops the archive's top-level directory.
            archive_extract(args.unified_prebuilt, extract_dir, strip_components=1)
            args.unified_prebuilt = extract_dir

        # One toolchain serves as both the Clang and the LLVM prebuilt.
        args.clang_prebuilt = ClangToolchain(
            args.unified_prebuilt, host=build_platform.system(), llvm_version=args.llvm_version)
        args.llvm_prebuilt = args.clang_prebuilt

    else:
        if args.clang_prebuilt is not None:
            if is_archive(args.clang_prebuilt):
                extract_dir = Path(tempfile.mkdtemp(prefix="clang-prebuilt-"))
                print(f"Extracting Clang archive to {extract_dir}")
                archive_extract(args.clang_prebuilt, extract_dir, strip_components=1)
                args.clang_prebuilt = extract_dir

            args.clang_prebuilt = ClangToolchain(args.clang_prebuilt, host=build_platform.system())

        if args.llvm_prebuilt is not None:
            if is_archive(args.llvm_prebuilt):
                extract_dir = Path(tempfile.mkdtemp(prefix="llvm-prebuilt-"))
                print(f"Extracting LLVM archive to {extract_dir}")
                archive_extract(args.llvm_prebuilt, extract_dir, strip_components=1)
                args.llvm_prebuilt = extract_dir

            args.llvm_prebuilt = ClangToolchain(
                args.llvm_prebuilt, host=build_platform.system(), llvm_version=args.llvm_version)

    if args.llvm_prebuilt is not None and build_platform.is_linux():
        # The default target triple for x86 Linux in LLVM is
        # i386-unknown-linux-gnu. We add a symbolic link to the compiler
        # runtimes using the i686 triple name, which is used by Rust.
        runtimes_path_i686 = args.llvm_prebuilt.runtimes_path("i686-unknown-linux-gnu")
        if not (runtimes_path_i686.is_symlink() or runtimes_path_i686.is_dir()):
            runtimes_path_i686.symlink_to("i386-unknown-linux-gnu", target_is_directory=True)
| |
| |
# Must be called after initialize_llvm_and_clang_prebuilts()
def initialize_zstd(args: argparse.Namespace) -> None:
    """Builds libzstd from source when LLVM will be built in-tree, storing the
    library and include paths on args."""
    if args.llvm_prebuilt is not None:
        return
    if build_platform.is_windows():
        return
    if "musl" in args.host:
        return

    llvm_host_config: llvm_android.configs.Config = llvm_android.configs.host_config(musl=False)

    zstd_builder = llvm_android.builders.ZstdBuilder([llvm_host_config])
    print("Building libzstd")
    zstd_builder.build()
    args.zstd_library = zstd_builder.link_libraries[0]
    args.zstd_include = zstd_builder.include_dir
| |
| |
def extract_ndk_archive(archive_path: Path) -> Path:
    """Extracts an NDK archive into a temp directory and returns the NDK root.

    Raises:
        RuntimeError: If the archive does not contain exactly one top-level
            "android-ndk-*" directory.
    """
    # Bug fix: typo in the progress message ("NKD" -> "NDK").
    print(f"Unzipping NDK archive {str(archive_path)}")

    # Before exiting the script will test to see if args.ndk_path is
    # relative to the system temp directory and, if it is, will delete it.
    extract_dir = Path(tempfile.mkdtemp(prefix="android_ndk-"))
    archive_extract(archive_path, extract_dir)

    archive_top_level = list(extract_dir.iterdir())
    if len(archive_top_level) != 1 or not archive_top_level[0].name.startswith("android-ndk-"):
        raise RuntimeError("Unrecognized NDK archive layout")

    return extract_dir / archive_top_level[0].name
| |
| |
def regenerate_lockfile(
        rust_toolchain: RustToolchain, project_path: Path, env: dict[str, str]) -> None:
    """Offline fetch to regenerate lockfiles.

    Args:
        rust_toolchain: Toolchain whose cargo binary performs the fetch.
        project_path: Directory containing the Cargo.lock to regenerate.
        env: Environment for the cargo invocation.
    """
    # Bug fix: grammar in the failure message ("rebuilt" -> "rebuild").
    run_and_exit_on_failure([rust_toolchain.cargo(), "fetch", "--offline"],
                            f"Failed to rebuild {project_path}/Cargo.lock via cargo-fetch operation",
                            cwd=project_path,
                            env=env)
| |
| |
def package_toolchain(args: argparse.Namespace, env: dict[str, str]) -> None:
    """Assembles the built toolchain into distributable artifacts.

    Installs redistributable stdlib and vendored-crate sources (Linux), strips
    ELF binaries, copies required runtime libraries into lib64/, exports PGO
    profiles, and creates the final dist archive.
    """
    if args.config_only:
        return

    #
    # Install sources
    #

    if not args.llvm_only and build_platform.is_linux():
        shutil.rmtree(OUT_PATH_STDLIB_SRCS, ignore_errors=True)
        OUT_PATH_STDLIB_SRCS.mkdir(parents=True)

        stdlib_build_path = OUT_PATH_RUST_SOURCE / "build" / build_platform.triple() / f"stage{args.stage}-std"

        # Redistribute standard library sources
        for stdlib_path in STDLIB_TOP_LEVEL_SOURCES:
            src_path = OUT_PATH_RUST_SOURCE / stdlib_path
            dst_path = OUT_PATH_STDLIB_SRCS / stdlib_path
            if src_path.is_file():
                shutil.copy(src_path, dst_path)
            else:
                shutil.copytree(src_path, dst_path)

        # Redistribute vendored crates
        for crate_name in VENDORED_CRATE_REDISTRIBUTION_INFO:
            crate_src_path = OUT_PATH_RUST_SOURCE / versioned_vendor_crate_path(crate_name)
            crate_dst_path = OUT_PATH_STDLIB_SRCS / vendor_crate_path(crate_name)

            shutil.copytree(crate_src_path, crate_dst_path)

            # Redistribute generated files
            genfiles = VENDORED_CRATE_REDISTRIBUTION_INFO[crate_name].genfiles
            if genfiles:
                crate_genfile_path = crate_dst_path / "src" / "out"
                crate_genfile_path.mkdir()

                for (target_name, file_name) in genfiles:
                    stdlib_target_build_path = stdlib_build_path / target_name / "release" / "build"
                    crate_build_path_glob_results = list(
                        stdlib_target_build_path.glob(f"{crate_name}-*"))

                    if len(crate_build_path_glob_results) == 1:
                        genfile_src_path = crate_build_path_glob_results[0] / "out" / file_name

                        if genfile_src_path.exists():
                            shutil.copy(genfile_src_path, crate_genfile_path)
                        else:
                            print_colored(
                                f"Generated file not found for crate {crate_name}: {file_name}",
                                TERM_YELLOW)

                    elif len(crate_build_path_glob_results) == 0:
                        print_colored(
                            f"Unable to find build directory for vendored crate {crate_name} in {stdlib_target_build_path}",
                            TERM_YELLOW)

                    else:
                        print_colored(
                            f"Multiple output directories found for vendored crate {crate_name}",
                            TERM_YELLOW)

    #
    # Symbol fixup
    #

    # The Rust build doesn't have an option to auto-strip binaries so we do
    # it here. We only strip symbols from executables and .so objects.
    # --strip-debug is the lighter strip used when relocations are kept
    # (emit_relocs is forced on by the BOLT options in parse_args()).
    flag = "--strip-debug" if args.emit_relocs else "--strip-unneeded"
    for obj_path in get_prebuilt_binary_paths(OUT_PATH_PACKAGE):
        if is_elf_file(obj_path):
            args.clang_prebuilt.strip_symbols(obj_path, flag)

    #
    # File fixup
    #

    copy_libs = []
    # Install the libc++ library to out/package/lib64/
    if build_platform.is_darwin():
        copy_libs.append(CLANG_TOOLCHAIN_HOST.lib() / "libc++.dylib")
        copy_libs.append(CLANG_TOOLCHAIN_HOST.lib() / "libc++abi.dylib")

    elif build_platform.is_linux():
        copy_libs.append(CLANG_TOOLCHAIN_HOST.lib() / "libc++.so")
        copy_libs.append(CLANG_TOOLCHAIN_HOST.lib() / "libunwind.so")

        if args.host == "x86_64-unknown-linux-musl":
            musl_lib = CLANG_TOOLCHAIN_HOST.musl_lib()
            assert (musl_lib)

            copy_libs.append(musl_lib / "libc++.so")
            copy_libs.append(MUSL_SYSROOT64_PATH / "lib" / "libc_musl.so")

    if copy_libs:
        lib64_path = OUT_PATH_PACKAGE / "lib64"
        lib64_path.mkdir(exist_ok=True)
        for lib in copy_libs:
            shutil.copy2(lib, lib64_path / lib.name)

    # Some stdlib crates might include Android.mk or Android.bp files.
    # If they do, filter them out.
    #
    # Bug fix: pathlib.Path.glob() does not support brace expansion, so the
    # previous "**/Android.{mk,bp}" pattern never matched any files.
    if build_platform.is_linux():
        for pattern in ("**/Android.mk", "**/Android.bp"):
            for file_path in OUT_PATH_STDLIB_SRCS.glob(pattern):
                file_path.unlink()

    # Copy in the Bazel build definitions
    if not args.llvm_only and args.host == "x86_64-unknown-linux-gnu":
        shutil.copy(TOOLCHAIN_BAZEL_PATH, OUT_PATH_PACKAGE)

    #
    # Dist
    #

    print("Creating artifacts")

    export_profiles(
        args.clang_prebuilt, args.profile_generate or args.cs_profile_generate, args.dist_path)

    if args.profile_use and args.profile_use != args.dist_path:
        for p in args.profile_use.glob("*.profdata"):
            shutil.copy(p, args.dist_path)

    archive_create(args.dist_path / f"rust-{args.build_name}", OUT_PATH_PACKAGE, overwrite=True)
| |
| |
def remove_temporary_directories(args: argparse.Namespace) -> None:
    """Deletes any prebuilts/NDKs that were extracted into the system temp dir."""
    if args.config_only:
        return

    temp_root = tempfile.gettempdir()

    if args.ndk_path.is_relative_to(temp_root):
        shutil.rmtree(args.ndk_path)

    rust_base = args.rust_prebuilt.base_dir
    if rust_base.is_relative_to(temp_root):
        shutil.rmtree(rust_base.as_posix())

    llvm_prebuilt = args.llvm_prebuilt
    if llvm_prebuilt is not None and llvm_prebuilt.base_dir.is_relative_to(temp_root):
        shutil.rmtree(llvm_prebuilt.base_dir.as_posix())
| |
| |
def test_toolchain(args: argparse.Namespace, env: dict[str, str]) -> None:
    """Runs the upstream Rust test suite when requested (Linux only).

    With --upstream-test-only the script cleans up and exits after the tests.
    """
    if args.config_only or args.package_only:
        return

    tests_requested = args.upstream_test or args.upstream_test_only
    if build_platform.is_linux() and tests_requested:
        command: list = [
            BIN_PATH_PYTHON,
            OUT_PATH_RUST_SOURCE / "x.py",
            "--stage", args.stage,
            "--target", args.host,
            "test",
            "--no-doc",
        ]
        command += disabled_tests
        command.append("--")
        command += skipped_tests

        result = subprocess.run(command, cwd=OUT_PATH_RUST_SOURCE, env=env)

        if result.returncode != 0:
            raise ScriptException(f"Test stage failed with error {result.returncode}")

        if args.upstream_test_only:
            remove_temporary_directories(args)
            sys.exit(0)
| |
| |
def versioned_vendor_crate_path(crate_name: str) -> str:
    """Returns the versioned vendor/ path for a redistributed crate.

    Raises:
        ScriptException: If the crate has no redistribution entry.
    """
    # Idiom fix: `x not in y` instead of `not x in y`.
    if crate_name not in VENDORED_CRATE_REDISTRIBUTION_INFO:
        raise ScriptException(f"No redistribution information for vendored crate {crate_name}")

    return f"vendor/{crate_name}-{VENDORED_CRATE_REDISTRIBUTION_INFO[crate_name].version}"
| |
| |
def vendor_crate_path(crate_name: str) -> str:
    """Returns the unversioned vendor/ path for a crate."""
    return "vendor/" + crate_name
| |
| |
def main(argv: list[str] | None = None) -> None:
    """Runs the configure-build-fixup-dist pipeline."""

    args = parse_args(argv)
    args.dist_path.mkdir(exist_ok=True)

    # Record the exact command line used for this build next to the artifacts.
    command_line = " ".join(argv or sys.argv)
    with open(args.dist_path / BUILD_COMMAND_RECORD_NAME, "w") as record_file:
        record_file.write(command_line)

    # Add some output padding to make the messages easier to read
    print()

    audit_vendored_crates(args)
    env = initialize_build(args)
    build_toolchain(args, env)
    test_toolchain(args, env)
    package_toolchain(args, env)
    boltify_toolchain(args)
    remove_temporary_directories(args)
    audit_toolchain_package_dir(args)
| audit_toolchain_package_dir(args) |
| |
| |
if __name__ == "__main__":
    try:
        main()
        sys.exit(0)

    # Known failure modes exit with a colored message instead of a traceback;
    # any other exception propagates normally.
    except (ScriptException, argparse.ArgumentTypeError) as err:
        print_colored(str(err), TERM_RED)
        sys.exit(1)