| #!/bin/sh |
| # |
| # Copyright (C) 2018 The Android Open Source Project |
| # |
| # Licensed under the Apache License, Version 2.0 (the "License"); |
| # you may not use this file except in compliance with the License. |
| # You may obtain a copy of the License at |
| # |
| # http://www.apache.org/licenses/LICENSE-2.0 |
| # |
| # Unless required by applicable law or agreed to in writing, software |
| # distributed under the License is distributed on an "AS IS" BASIS, |
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| # See the License for the specific language governing permissions and |
| # limitations under the License. |
| # |
| """:" # Shell script (in docstring to appease pylint) |
| # Find and invoke hermetic python3 interpreter |
| . "`dirname $0`/envsetup.sh"; exec "$PY3" "$0" "$@" |
| # Shell script end |
| |
| Invoke the Trusty build system and run tests. |
| """ |
| |
| import argparse |
| import getpass |
| import json |
| import multiprocessing |
| import os |
| import pathlib |
| import re |
| import shutil |
| import stat |
| import subprocess |
| import sys |
| from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED |
| |
| import run_tests |
| import trusty_build_config |
| from trusty_build_config import ( |
| TrustyAndroidTest, |
| TrustyBuildConfig, |
| TrustyPortTest, |
| TrustyCompositeTest, |
| ) |
| |
| from log_processor import LogEngine |
| |
| TRUSTED_APP_MAKEFILE_PATH = "trusty/user/base/make/trusted_app.mk" |
| TRUSTED_LOADABLE_APP_MAKEFILE_PATH = "trusty/kernel/make/loadable_app.mk" |
| GEN_MANIFEST_MAKEFILE_PATH = "trusty/user/base/make/gen_manifest.mk" |
| |
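| # ZIP metadata used by archive_symlink(): create_system value 3 marks an |
| # entry as created on Unix, and SYMLINK_MODE is the Unix st_mode of a |
| # symlink with 0777 permissions, stored in the upper 16 bits of |
| # external_attr. |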
| ZIP_CREATE_SYSTEM_UNIX = 3 |
| SYMLINK_MODE = stat.S_IFLNK | stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO |
| |
| |
| def get_new_build_id(build_root): |
| """Increment build-id file and return new build-id number.""" |
| path = os.path.join(build_root, "BUILDID") |
| try: |
| with open(path, "r", encoding="utf-8") as f: |
| num = int(f.read()) + 1 |
| except IOError: |
| num = 1 |
| with open(path, "w", encoding="utf-8") as f: |
| f.write(str(num)) |
| f.truncate() |
| # Return buildid string: <user>@<hostname>-<num> |
| # Use getpass.getuser() to avoid non-portability/failure of |
| # os.getlogin() |
| return getpass.getuser() + "@" + os.uname()[1] + "-" + str(num) |
| |
| |
| def mkdir(path): |
| """Create directory including parents if it does not already exist.""" |
| try: |
| os.makedirs(path) |
| except OSError: |
| if not os.path.isdir(path): |
| raise |
| |
| |
| def copy_file(src, dest, optional=False): |
| """Copy a file. |
| |
| Copy a file or exit if the file cannot be copied. |
| |
| Args: |
| src: Path of file to copy. |
| dest: Path to copy file to. |
| optional: Optional boolean argument. If True, don't exit if the source |
| file does not exist. |
| """ |
| if not os.path.exists(src) and optional: |
| return |
| print("Copy:", repr(src), "->", repr(dest)) |
| shutil.copy(src, dest) |
| |
| |
| def archive_build_file(args, project, src, dest=None, optional=False): |
| """Copy a file to build archive directory. |
| |
| Construct src and dest path and call copy_file. |
| |
| Args: |
| args: Program arguments. |
| project: Project name. |
| src: Source path relative to project build dir. |
| dest: Optional dest path relative to archive dir. Can be omitted if src |
| is a simple filename. |
| optional: Optional boolean argument. If True, don't exit if the source |
| file does not exist. |
| """ |
| if not dest: |
| dest = src |
| src = os.path.join(args.build_root, "build-" + project, src) |
| # dest must be a fixed path across repeated builds of the same artifact |
| # so that prebuilt update scripts can locate it. Including the project |
| # name is fine because it identifies which artifact is wanted - LK for a |
| # specific target. Including BUILD_ID or feature selections that may |
| # change is not, because the prebuilt update script cannot predict the |
| # path at which the artifact will live. |
| dest = os.path.join(args.archive, project + "." + dest) |
| copy_file(src, dest, optional=optional) |
| |
| |
| def archive_symlink(zip_archive, arcname, target): |
| """Add a symbolic link to the archive |
| |
| Args: |
| zip_archive: Archive to update |
| arcname: Filename in the archive to be added |
| target: Symbolic link target |
| """ |
| zinfo = ZipInfo(arcname) |
| zinfo.create_system = ZIP_CREATE_SYSTEM_UNIX |
| zinfo.external_attr = SYMLINK_MODE << 16 |
| zip_archive.writestr(zinfo, target) |
| |
| |
| def is_child_of_any(path, possible_parents): |
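| """Return True if path starts with any entry in possible_parents (a plain string-prefix check).""" |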
| for possible_parent in possible_parents: |
| if path.startswith(possible_parent): |
| return True |
| return False |
| |
| |
| def archive_dir(zip_archive, src, dest, omit=()): |
| """Recursively add a directory to a ZIP file. |
| |
| Recursively add the src directory to the ZIP with dest path inside the |
| archive. |
| |
| Args: |
| zip_archive: A ZipFile opened for append or write. |
| src: Source directory to add to the archive. |
| dest: Destination path inside the archive (must be a relative path). |
| omit: List of directories to omit from the archive, specified as |
| relative paths from `src`. |
| """ |
| for root, dirs, files in os.walk(src): |
| rel_root = os.path.relpath(root, start=src) |
| if is_child_of_any(rel_root, omit): |
| continue |
| |
| for d in dirs: |
| dir_path = os.path.join(root, d) |
| |
| if os.path.islink(dir_path): |
| archive_dest = os.path.join( |
| dest, os.path.relpath(dir_path, start=src) |
| ) |
| archive_symlink( |
| zip_archive, archive_dest, os.readlink(dir_path) |
| ) |
| |
| for f in files: |
| file_path = os.path.join(root, f) |
| archive_dest = os.path.join( |
| dest, os.path.relpath(file_path, start=src) |
| ) |
| if os.path.islink(file_path): |
| archive_symlink( |
| zip_archive, archive_dest, os.readlink(file_path) |
| ) |
| else: |
| zip_archive.write(file_path, archive_dest) |
| |
| |
| def archive_file(zip_archive, src_file, dest_dir="", optional=False): |
| """Add a file to a ZIP file. |
| |
| Adds src_file to archive in the directory dest_dir, relative to the root of |
| the archive. |
| |
| Args: |
| zip_archive: A ZipFile opened for append or write. |
| src_file: Source file to add to the archive. |
| dest_dir: Relative destination path in the archive for this file. |
| optional: Optional boolean argument. If True, don't exit if the source |
| file does not exist. |
| """ |
| if not os.path.exists(src_file) and optional: |
| return |
| zip_archive.write( |
| src_file, os.path.join(dest_dir, os.path.basename(src_file)) |
| ) |
| |
| |
| def assemble_sdk(build_config, args): |
| """Assemble Trusty SDK archive""" |
| filename = os.path.join(args.archive, "trusty_sdk-" + args.buildid + ".zip") |
| with ZipFile(filename, "a", compression=ZIP_DEFLATED) as sdk_archive: |
| print("Building SDK archive ZIP...") |
| for project in args.project: |
| print(f"Adding SDK project... ({project})") |
| project_buildroot = os.path.join( |
| args.build_root, "build-" + project |
| ) |
| |
| project_sysroot_dir = os.path.join("sysroots", project, "usr") |
| src = os.path.join(project_buildroot, "sdk", "sysroot", "usr") |
| archive_dir(sdk_archive, src, project_sysroot_dir, omit=["lib/doc"]) |
| |
| src = os.path.join(project_buildroot, "sdk", "LICENSE") |
| archive_file(sdk_archive, src) |
| |
| project_makefile_dir = os.path.join("make", project) |
| src = os.path.join(project_buildroot, "sdk", "make") |
| archive_dir(sdk_archive, src, project_makefile_dir) |
| |
| project_tools_dir = os.path.join("sysroots", project, "tools") |
| src = os.path.join( |
| project_buildroot, "host_tools", "apploader_package_tool" |
| ) |
| archive_file(sdk_archive, src, project_tools_dir, optional=True) |
| |
| src = os.path.join( |
| project_buildroot, "sdk", "tools", "manifest_compiler.py" |
| ) |
| archive_file(sdk_archive, src, project_tools_dir) |
| |
| project_keys = build_config.signing_keys(project) |
| for key_file in project_keys: |
| archive_file(sdk_archive, key_file, project_tools_dir) |
| |
| print("Adding SDK sundries...") |
| |
| # Copy the app makefile |
| archive_file(sdk_archive, TRUSTED_APP_MAKEFILE_PATH, "make") |
| archive_file(sdk_archive, TRUSTED_LOADABLE_APP_MAKEFILE_PATH, "make") |
| archive_file(sdk_archive, GEN_MANIFEST_MAKEFILE_PATH, "make") |
| |
| # Copy doc files |
| for doc_file in build_config.doc_files: |
| archive_file(sdk_archive, doc_file) |
| |
| # Add clang version info |
| envsetup = os.path.join(args.script_dir, "envsetup.sh") |
| cmd = f"source {envsetup} && echo $CLANG_BINDIR" |
| clang_bindir = ( |
| subprocess.check_output(cmd, shell=True, executable="/bin/bash") |
| .decode() |
| .strip() |
| ) |
| clang_dir = os.path.join(clang_bindir, "../") |
| |
| cmd = f"cd {clang_dir}; git rev-parse HEAD" |
| clang_prebuilt_commit = ( |
| subprocess.check_output(cmd, shell=True, executable="/bin/bash") |
| .decode() |
| .strip() |
| ) |
| |
| archive_file( |
| sdk_archive, |
| os.path.join(clang_dir, "AndroidVersion.txt"), |
| "clang-version", |
| ) |
| archive_file( |
| sdk_archive, |
| os.path.join(clang_dir, "clang_source_info.md"), |
| "clang-version", |
| ) |
| sdk_archive.writestr( |
| os.path.join("clang-version", "PrebuiltCommitId.txt"), |
| clang_prebuilt_commit, |
| ) |
| |
| # Add trusty version info |
| sdk_archive.writestr("Version.txt", args.buildid) |
| |
| # Add the toolchain if requested |
| if args.archive_toolchain: |
| _head, clang_ver = os.path.split(os.path.realpath(clang_dir)) |
| print(f"Adding SDK toolchain... ({clang_ver})") |
| archive_dir( |
| sdk_archive, clang_dir, os.path.join("toolchain", clang_ver) |
| ) |
| archive_symlink( |
| sdk_archive, os.path.join("toolchain", "clang"), clang_ver |
| ) |
| |
| |
| def build(args): |
| """Call build system and copy build files to archive dir.""" |
| mkdir(args.build_root) |
| |
| if args.buildid is None: |
| args.buildid = get_new_build_id(args.build_root) |
| print("BuildID", args.buildid) |
| |
| nice = "" if args.no_nice else "nice" |
| |
| # build projects |
| failed = [] |
| |
| for project in args.project: |
| cmd = ( |
| f"export BUILDROOT={args.build_root};" |
| f"export BUILDID={args.buildid};" |
| f"{nice} $BUILDTOOLS_BINDIR/make {project} " |
| f"-f $LKROOT/makefile -j {args.jobs}" |
| ) |
| # Source envsetup first; if it fails, the build command is not run. |
| envsetup = os.path.join(args.script_dir, "envsetup.sh") |
| cmd = f"source {envsetup:s} && ({cmd:s})" |
| |
| # check if we are attached to a real terminal |
| terminal_output = sys.stdout.isatty() |
| |
| if args.color_log and terminal_output: |
| # postprocess output with custom log processor |
| |
| # define an additional env variable so make generates log markers |
| cmd = f"export LOG_POSTPROCESSING=1; {cmd:s}" |
| |
| with ( |
| open(project + ".log", "wt", encoding="utf-8") as log_file, |
| LogEngine(log_file) as log_engine, |
| ): |
| status = subprocess.call( |
| cmd, |
| shell=True, |
| executable="/bin/bash", |
| stdout=log_engine.stdout, |
| stderr=log_engine.stderr, |
| ) |
| else: # no output intercepting |
| status = subprocess.call(cmd, shell=True, executable="/bin/bash") |
| |
| print("cmd: '" + cmd + "' returned", status) |
| if status: |
| failed.append(project) |
| |
| if failed: |
| print() |
| print("some projects have failed to build:") |
| print(str(failed)) |
| sys.exit(1) |
| |
| |
| def zip_dir(zip_archive, src, dest, filterfunc=lambda _: True): |
| """Recursively add a directory to a ZIP file. |
| |
| Recursively add the src directory to the ZIP with dest path inside the |
| archive. |
| |
| Args: |
| zip_archive: A ZipFile opened for append or write. |
| src: Source directory to add to the archive. |
| dest: Destination path inside the archive (must be a relative path). |
| filterfunc: Optional predicate; only files whose names it accepts are |
| added to the archive. |
| """ |
| for root, _dirs, files in os.walk(src): |
| for f in files: |
| if not filterfunc(f): |
| continue |
| file_path = os.path.join(root, f) |
| archive_dest = os.path.join( |
| dest, os.path.relpath(file_path, start=src) |
| ) |
| zip_archive.write(file_path, archive_dest) |
| |
| |
| def zip_file(zip_archive, src_file, dest_dir=""): |
| """Add a file to a ZIP file. |
| |
| Adds src_file to archive in the directory dest_dir, relative to the root of |
| the archive. |
| |
| Args: |
| zip_archive: A ZipFile opened for append or write. |
| src_file: Source file to add to the archive. |
| dest_dir: Relative destination path in the archive for this file. |
| """ |
| zip_archive.write( |
| src_file, os.path.join(dest_dir, os.path.basename(src_file)) |
| ) |
| |
| |
| def archive_symbols(args, project): |
| """Archive symbol files for the kernel and each trusted app""" |
| proj_buildroot = os.path.join(args.build_root, "build-" + project) |
| filename = os.path.join(args.archive, f"{project}-{args.buildid}.syms.zip") |
| |
| with ZipFile(filename, "a", compression=ZIP_DEFLATED) as zip_archive: |
| print("Archiving symbols in " + os.path.relpath(filename, args.archive)) |
| |
| # archive the kernel elf file |
| zip_file(zip_archive, os.path.join(proj_buildroot, "lk.elf")) |
| |
| # archive the kernel symbols |
| zip_file(zip_archive, os.path.join(proj_buildroot, "lk.elf.sym")) |
| zip_file(zip_archive, os.path.join(proj_buildroot, "lk.elf.sym.sorted")) |
| |
| # archive path/to/app.syms.elf for each trusted app |
| zip_dir( |
| zip_archive, proj_buildroot, "", lambda f: f.endswith("syms.elf") |
| ) |
| |
| |
| def archive_listings(args, project): |
| """Archive lst files for the kernel and each trusted app""" |
| proj_buildroot = os.path.join(args.build_root, "build-" + project) |
| filename = os.path.join(args.archive, f"{project}-{args.buildid}.lst.zip") |
| |
| with ZipFile(filename, "a", compression=ZIP_DEFLATED) as zip_archive: |
| print("Archiving .lst in " + os.path.relpath(filename, args.archive)) |
| |
| # archive all .lst files under the buildroot |
| zip_dir( |
| zip_archive, proj_buildroot, "", lambda f: f.endswith(".lst") |
| ) |
| |
| |
| def create_uuid_map(args, project): |
| """Creating a mapping txt file for uuid and symbol files""" |
| |
| def time_from_bytes(f, n: int) -> str: |
| """Read n bytes from f as an int, and convert that int to a string.""" |
| rtime = int.from_bytes(f.read(n), byteorder="little") |
| width = 2 * n |
| return f"{rtime:0{width}x}" |
| |
| proj_buildroot = os.path.join(args.build_root, "build-" + project) |
| uuidmapfile = os.path.join(args.archive, "uuid-map.txt") |
| zipfile = os.path.join(args.archive, f"{project}-{args.buildid}.syms.zip") |
| sym_files = list(pathlib.Path(proj_buildroot).rglob("*.syms.elf")) |
| |
| for file in sym_files: |
| folder = file.parents[0] |
| manifest_files = list(pathlib.Path(folder).glob("*.manifest")) |
| if len(manifest_files) == 1: |
| manifest = manifest_files[0] |
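| # Read the app UUID from the start of the compiled manifest: time_low |
| # (4 bytes), time_mid (2), time_hi_and_version (2) and 8 single |
| # clock_seq_and_node bytes, read little-endian, then format it as the |
| # canonical UUID string. |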
| with open(manifest, "rb") as f: |
| time_low = time_from_bytes(f, 4) |
| time_mid = time_from_bytes(f, 2) |
| time_hi_and_version = time_from_bytes(f, 2) |
| clock_seq_and_node = [time_from_bytes(f, 1) for _ in range(8)] |
| uuid_str = ( |
| f"{time_low}-{time_mid}-{time_hi_and_version}-" |
| f"{clock_seq_and_node[0]}{clock_seq_and_node[1]}-" |
| f"{clock_seq_and_node[2]}{clock_seq_and_node[3]}" |
| f"{clock_seq_and_node[4]}{clock_seq_and_node[5]}" |
| f"{clock_seq_and_node[6]}{clock_seq_and_node[7]}" |
| ) |
| with open(uuidmapfile, "a", encoding="utf-8") as f: |
| f.write(f"{uuid_str}, {file.relative_to(proj_buildroot)}\n") |
| |
| if os.path.exists(uuidmapfile): |
| with ZipFile(zipfile, "a", compression=ZIP_DEFLATED) as zip_archive: |
| zip_file(zip_archive, uuidmapfile) |
| os.remove(uuidmapfile) |
| |
| |
| def create_scripts_archive(args, project): |
| """Create an archive for the scripts""" |
| coverage_script = os.path.join(args.script_dir, "genReport.py") |
| scripts_zip = os.path.join( |
| args.archive, f"{project}-{args.buildid}.scripts.zip" |
| ) |
| if not os.path.exists(coverage_script): |
| print("Coverage script does not exist!") |
| return |
| |
| with ZipFile(scripts_zip, "a", compression=ZIP_DEFLATED) as zip_archive: |
| zip_file(zip_archive, coverage_script) |
| |
| |
| def archive(build_config, args): |
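| """Copy the build artifacts of each project into the archive directory. |
| |
| Does nothing if no archive directory was requested via --archive. |
| """ |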
| if args.archive is None: |
| return |
| |
| mkdir(args.archive) |
| |
| # Copy the files we care about to the archive directory |
| for project in args.project: |
| # config-driven archiving |
| for item in build_config.dist: |
| archive_build_file( |
| args, project, item.src, item.dest, optional=item.optional |
| ) |
| |
| # copy out tos.img if it exists |
| archive_build_file(args, project, "tos.img", optional=True) |
| |
| # copy out monitor if it exists |
| archive_build_file( |
| args, project, "monitor/monitor.bin", "monitor.bin", optional=True |
| ) |
| |
| # copy out trusty.padded if it exists |
| archive_build_file(args, project, "trusty.padded", optional=True) |
| |
| # copy out trusty.signed if it exists |
| archive_build_file(args, project, "trusty.signed", optional=True) |
| |
| # copy out trusty_usb.signed if it exists |
| archive_build_file(args, project, "trusty_usb.signed", optional=True) |
| |
| # copy out lk image |
| archive_build_file(args, project, "lk.bin") |
| archive_build_file(args, project, "lk.elf") |
| |
| # copy out qemu package if it exists |
| archive_build_file( |
| args, project, "trusty_qemu_package.zip", optional=True |
| ) |
| |
| # copy out emulator image package if it exists |
| archive_build_file( |
| args, project, "trusty_image_package.tar.gz", optional=True |
| ) |
| |
| # copy out test package if it exists |
| archive_build_file( |
| args, project, "trusty_test_package.zip", optional=True |
| ) |
| |
| # export the app package tool for use in the SDK. This can go away once |
| # all the SDK patches have landed, as the tool will be packaged in the |
| # SDK zip. |
| archive_build_file( |
| args, |
| project, |
| "host_tools/apploader_package_tool", |
| "apploader_package_tool", |
| optional=True, |
| ) |
| |
| # copy out symbol files for kernel and apps |
| archive_symbols(args, project) |
| |
| # copy out listings files for kernel and apps |
| archive_listings(args, project) |
| |
| # create map between UUIDs and symbol files |
| create_uuid_map(args, project) |
| |
| # create zip file containing scripts |
| create_scripts_archive(args, project) |
| |
| # create sdk zip |
| assemble_sdk(build_config, args) |
| |
| |
| def get_build_deps(project_name, project, project_names, already_built): |
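| """Recursively collect a project and its also_build dependencies. |
| |
| Project names are appended to project_names in dependency-first order; |
| already_built tracks visited projects so each one is added only once. |
| """ |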
| if project_name not in already_built: |
| already_built.add(project_name) |
| for dep_project_name, dep_project in project.also_build.items(): |
| get_build_deps( |
| dep_project_name, dep_project, project_names, already_built |
| ) |
| project_names.append(project_name) |
| |
| |
| def create_test_map(args, build_config, projects): |
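| """Write a <project>-test-map.json file into each project's test package. |
| |
| For every project with tests, the map lists port tests and Android test |
| commands (including any "needs" flags) and is added to the project's |
| trusty_test_package.zip. Duplicate test names cause the build to fail. |
| """ |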
| for project_name in projects: |
| test_map = {"port_tests": [], "commands": []} |
| test_names = set() |
| duplicates = set() |
| project = build_config.get_project(project_name) |
| |
| if not project or not project.tests: |
| continue |
| |
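| # Test names look like "<type>:<name>" (possibly with several "<type>:" |
| # segments); project_type_prefix is used below to strip those segments |
| # and recover the bare test name. |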
| port_test_prefix = "android-port-test:" |
| project_type_prefix = re.compile("([^:]+:)+") |
| |
| for test in project.tests: |
| test_type = None |
| match test: |
| case TrustyCompositeTest() if any( |
| s |
| for s in test.sequence |
| if s.name.startswith(port_test_prefix) |
| ): |
| test_type = TrustyCompositeTest |
| case TrustyAndroidTest() if test.name.startswith( |
| port_test_prefix |
| ): |
| test_type = TrustyPortTest |
| case TrustyAndroidTest(): |
| test_type = TrustyAndroidTest |
| case _: |
| pass |
| |
| if test_type: |
| test_obj = {"needs": []} |
| test_name = re.sub(project_type_prefix, "", test.name) |
| |
| if test_name in test_names: |
| duplicates.add(test_name) |
| continue |
| test_names.add(test_name) |
| |
| if hasattr(test, "need") and hasattr(test.need, "flags"): |
| test_obj["needs"] = list(test.need.flags) |
| if hasattr(test, "port_type"): |
| test_obj["type"] = str(test.port_type) |
| |
| match test_type: |
| case trusty_build_config.TrustyPortTest: |
| test_obj["port_name"] = test_name |
| test_map["port_tests"].append(test_obj) |
| case trusty_build_config.TrustyAndroidTest: |
| test_obj["command_name"] = test_name |
| test_obj["command"] = test.command |
| test_map["commands"].append(test_obj) |
| case trusty_build_config.TrustyCompositeTest: |
| test_obj["port_name"] = test_name |
| test_obj["sequence"] = [] |
| |
| for subtest in test.sequence: |
| subtest_name = re.sub( |
| project_type_prefix, "", subtest.name |
| ) |
| test_obj["sequence"].append(subtest_name) |
| if hasattr(subtest, "need") and hasattr( |
| subtest.need, "flags" |
| ): |
| test_obj["needs"] += list(subtest.need.flags) |
| |
| test_obj["needs"] += list(set(test_obj["needs"])) |
| |
| test_map["port_tests"].append(test_obj) |
| |
| if duplicates: |
| print("ERROR: The following port tests are included multiple times") |
| for port in duplicates: |
| print(port) |
| sys.exit(-1) |
| |
| project_buildroot = os.path.join( |
| args.build_root, "build-" + project_name |
| ) |
| zip_path = os.path.join(project_buildroot, "trusty_test_package.zip") |
| with ZipFile(zip_path, "a", compression=ZIP_DEFLATED) as zipf: |
| zipf.writestr( |
| project_name + "-test-map.json", json.dumps(test_map, indent=4) |
| ) |
| |
| |
| def main(default_config=None, emulator=True): |
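| """Parse arguments, build the requested projects, archive artifacts and run tests. |
| |
| Args: |
| default_config: Default value for the --config option. |
| emulator: Forwarded to run_tests.test_projects(). |
| """ |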
| parser = argparse.ArgumentParser() |
| |
| parser.add_argument( |
| "project", |
| type=str, |
| nargs="*", |
| default=[".test.all"], |
| help="Project to build and/or test.", |
| ) |
| parser.add_argument( |
| "--build-root", |
| type=os.path.abspath, |
| default=None, |
| help="Root of intermediate build directory.", |
| ) |
| parser.add_argument( |
| "--archive", |
| type=str, |
| default=None, |
| help="Location of build artifacts directory. If " |
| "omitted, no artifacts will be produced.", |
| ) |
| parser.add_argument( |
| "--archive-toolchain", |
| action="store_true", |
| help="Include the clang toolchain in the archive.", |
| ) |
| parser.add_argument("--buildid", type=str, help="Server build id") |
| parser.add_argument( |
| "--jobs", |
| type=str, |
| default=multiprocessing.cpu_count(), |
| help="Max number of build jobs.", |
| ) |
| parser.add_argument( |
| "--test", |
| type=str, |
| action="append", |
| help="Manually specify test(s) to run. " |
| "Only build projects that have test(s) enabled that " |
| "matches a listed regex.", |
| ) |
| parser.add_argument( |
| "--verbose", |
| action="store_true", |
| help="Verbose debug output from test(s).", |
| ) |
| parser.add_argument( |
| "--debug-on-error", |
| action="store_true", |
| help="Wait for debugger connection if test fails.", |
| ) |
| parser.add_argument( |
| "--clang", action="store_true", default=None, help="Build with clang." |
| ) |
| parser.add_argument("--skip-build", action="store_true", help="Skip build.") |
| parser.add_argument( |
| "--skip-tests", action="store_true", help="Skip running tests." |
| ) |
| parser.add_argument( |
| "--run-disabled-tests", |
| action="store_true", |
| help="Also run disabled tests.", |
| ) |
| parser.add_argument( |
| "--skip-project", |
| action="append", |
| default=[], |
| help="Remove project from projects being built.", |
| ) |
| parser.add_argument( |
| "--config", |
| type=str, |
| help="Path to an alternate " "build-config file.", |
| default=default_config, |
| ) |
| parser.add_argument( |
| "--android", |
| type=str, |
| help="Path to an Android build to run tests against.", |
| ) |
| parser.add_argument( |
| "--color-log", |
| action="store_true", |
| help="Use colored build logs with pinned status lines.", |
| ) |
| parser.add_argument( |
| "--no-nice", |
| action="store_true", |
| help="Do not use nice to run the build.", |
| ) |
| parser.add_argument( |
| "--script-dir", |
| type=os.path.abspath, |
| default=os.path.dirname(os.path.abspath(__file__)), |
| help="Override the path to the directory of the script. This is for a " |
| "workaround to support the Soong-built binary." |
| ) |
| args = parser.parse_args() |
| |
| |
| # Change the current directory to the Trusty root |
| # We do this after parsing all the arguments because |
| # some of the paths, e.g., script-dir, might be relative |
| # to the directory that the script was called from, not |
| # to the Trusty root directory |
| top = os.path.abspath(os.path.join(args.script_dir, "../../../../..")) |
| os.chdir(top) |
| |
| if not args.build_root: |
| args.build_root = os.path.join(top, "build-root") |
| |
| # Depending on trusty_build_config.py's default config path doesn't work on |
| # the Soong-built python binary. |
| config_file = args.config |
| if not config_file: |
| config_file = os.path.join(args.script_dir, "build-config") |
| |
| build_config = TrustyBuildConfig( |
| config_file=config_file, android=args.android |
| ) |
| |
| projects = [] |
| for project in args.project: |
| if project == ".test.all": |
| projects += build_config.get_projects(build=True) |
| elif project == ".test": |
| projects += build_config.get_projects(build=True, have_tests=True) |
| else: |
| projects.append(project) |
| |
| # skip specific projects |
| ok = True |
| for skip in args.skip_project: |
| if skip in projects: |
| projects.remove(skip) |
| else: |
| sys.stderr.write(f"ERROR unknown project --skip-project={skip}\n") |
| ok = False |
| if not ok: |
| sys.exit(1) |
| |
| # If there are any test filters, ignore projects that don't have |
| # any tests that match those filters. |
| test_filters = ( |
| [re.compile(test) for test in args.test] if args.test else None |
| ) |
| if test_filters: |
| projects = run_tests.projects_to_test( |
| build_config, |
| projects, |
| test_filters, |
| run_disabled_tests=args.run_disabled_tests, |
| ) |
| |
| # find build dependencies |
| projects_old = projects |
| projects = [] |
| built_projects = set() |
| for project_name in projects_old: |
| get_build_deps( |
| project_name, |
| build_config.get_project(project_name), |
| projects, |
| built_projects, |
| ) |
| args.project = projects |
| |
| print("Projects", str(projects)) |
| |
| if args.skip_build: |
| print("Skip build for", args.project) |
| else: |
| build(args) |
| create_test_map(args, build_config, projects) |
| archive(build_config, args) |
| |
| # Run tests |
| if not args.skip_tests: |
| test_result = run_tests.test_projects( |
| build_config, |
| args.build_root, |
| projects, |
| qemu_instance_id=None, |
| run_disabled_tests=args.run_disabled_tests, |
| test_filters=test_filters, |
| verbose=args.verbose, |
| debug_on_error=args.debug_on_error, |
| emulator=emulator, |
| ) |
| |
| test_result.print_results() |
| if test_result.failed_projects: |
| sys.exit(1) |
| |
| |
| if __name__ == "__main__": |
| main() |