Snap for 8603585 from 882a18888febb9cb0b9d6c6069498cbc4aa30f88 to mainline-tzdata3-release
Change-Id: Ie9770476af8f55d751f1541c7ca528e471fc411f
diff --git a/.style.yapf b/.style.yapf
index 97f7d65..c4472bd 100644
--- a/.style.yapf
+++ b/.style.yapf
@@ -1,3 +1,4 @@
[style]
-based_on_style = chromium
+based_on_style = pep8
blank_line_before_module_docstring = true
+indent_width = 2
diff --git a/OWNERS b/OWNERS
index 01737d3..632a5d2 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,2 +1,2 @@
set noparent
-include OWNERS.toolchain
+include toolchain/llvm_android:/OWNERS
diff --git a/OWNERS.android b/OWNERS.android
deleted file mode 100644
index be9e61f..0000000
--- a/OWNERS.android
+++ /dev/null
@@ -1 +0,0 @@
-include platform/prebuilts/clang/host/linux-x86:/OWNERS
diff --git a/OWNERS.toolchain b/OWNERS.toolchain
index 48ecc26..67a4cd0 100644
--- a/OWNERS.toolchain
+++ b/OWNERS.toolchain
@@ -1,12 +1,12 @@
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
[email protected]
# Temporary; see comment #2 on crbug.com/982498
[email protected]
diff --git a/PRESUBMIT.cfg b/PRESUBMIT.cfg
index d69d375..5fccf3a 100644
--- a/PRESUBMIT.cfg
+++ b/PRESUBMIT.cfg
@@ -1,2 +1,6 @@
[Hook Scripts]
toolchain_utils_presubmits = ./toolchain_utils_githooks/check-presubmit ${PRESUBMIT_FILES}
+
+[Hook Overrides Options]
+cros_license_check: --exclude_regex=\b(default_remotes)$
+long_line_check: --exclude_regex=\b(default_remotes)$
diff --git a/afdo_metadata/kernel_afdo.json b/afdo_metadata/kernel_afdo.json
index 714ba35..49e1927 100644
--- a/afdo_metadata/kernel_afdo.json
+++ b/afdo_metadata/kernel_afdo.json
@@ -1,14 +1,14 @@
{
- "chromeos-kernel-3_18": {
- "name": "R89-13638.0-1607337469"
- },
"chromeos-kernel-4_4": {
- "name": "R89-13638.0-1607337135"
+ "name": "R100-14516.0-1645439511"
},
"chromeos-kernel-4_14": {
- "name": "R89-13638.0-1607337215"
+ "name": "R100-14516.0-1645439661"
},
"chromeos-kernel-4_19": {
- "name": "R89-13638.0-1607337429"
+ "name": "R100-14516.0-1645439606"
+ },
+ "chromeos-kernel-5_4": {
+ "name": "R100-14516.0-1645439482"
}
}
diff --git a/afdo_tools/bisection/afdo_prof_analysis.py b/afdo_tools/bisection/afdo_prof_analysis.py
index 94e5366..ce8afd6 100755
--- a/afdo_tools/bisection/afdo_prof_analysis.py
+++ b/afdo_tools/bisection/afdo_prof_analysis.py
@@ -24,9 +24,6 @@
import argparse
import json
-# Pylint recommends we use "from chromite.lib import cros_logging as logging".
-# Chromite specific policy message, we want to keep using the standard logging
-# pylint: disable=cros-logging-import
import logging
import os
import random
diff --git a/afdo_tools/update_kernel_afdo b/afdo_tools/update_kernel_afdo
index a677bf1..ff0ab22 100755
--- a/afdo_tools/update_kernel_afdo
+++ b/afdo_tools/update_kernel_afdo
@@ -5,114 +5,245 @@
# Due to crbug.com/1081332, we need to update AFDO metadata
# manually. This script performs a few checks and generates a
-# new kernel_afdo.json file, which can then be committed.
+# new kernel_afdo.json file, which can then be submitted.
#
-# USAGE:
-# toolchain-utils$ ./afdo_tools/update_kernel_afdo
-#
-# The script modifies the JSON file and shows the git diff.
-#
-# If the changes look good, git commit them. Example commit
-# message (from crrev.com/c/2197462):
-#
-# afdo_metadata: Publish the new kernel profiles
-#
-# Update chromeos-kernel-3_18 to R84-13080.0-1589189810
-# Update chromeos-kernel-4_4 to R84-13080.0-1589189726
-# Update chromeos-kernel-4_14 to R84-13080.0-1589190025
-# Update chromeos-kernel-4_19 to R84-13080.0-1589189550
-#
-# BUG=None
-# TEST=Verified in kernel-release-afdo-verify-orchestrator.
-#
+
+USAGE="
+Usage: $(basename $0) [main|beta|stable|all] [--help]
+
+Description:
+ The script takes one optional argument, the channel in which to update
+the kernel afdo, and creates a commit (or one commit per channel with
+\"all\") in the corresponding branch.
+ With no arguments it defaults to \"all\".
+ Follow the prompt to submit the changes.
+ NO CLEAN-UP NEEDED. The script ignores any local changes and keeps
+the current branch unchanged.
+"
set -eu
set -o pipefail
-CROS_REPO=https://chromium.googlesource.com/chromiumos/overlays/chromiumos-overlay
GS_BASE=gs://chromeos-prebuilt/afdo-job/vetted/kernel
-KVERS="3.18 4.4 4.14 4.19"
-errs=""
-successes=0
+KVERS="4.4 4.14 4.19 5.4"
+failed_channels=""
+# Add skipped chrome branches in ascending order here.
+SKIPPED_BRANCHES="95"
script_dir=$(dirname "$0")
-tc_utils_dir="$script_dir/.."
-metadata_dir="$tc_utils_dir/afdo_metadata"
-outfile="$metadata_dir/kernel_afdo.json"
+tc_utils_dir="${script_dir}/.."
+metadata_dir="${tc_utils_dir}/afdo_metadata"
+outfile="$(realpath --relative-to="${tc_utils_dir}" \
+ "${metadata_dir}"/kernel_afdo.json)"
+# Convert toolchain_utils to an absolute path.
+abs_tc_utils_dir="$(realpath ${tc_utils_dir})"
-# The most recent Monday, in Unix timestamp format.
-if [ $(date +%a) = "Mon" ]
-then
- expected_time=$(date +%s -d 00:00:00)
-else
- expected_time=$(date +%s -d "last Monday")
-fi
+# Check that profiles were uploaded within the last week.
+expected_time=$(date +%s -d "week ago")
-# Get the current canary branch number (using beta + 1)
-beta=$(git ls-remote -h $CROS_REPO | \
- sed -n -e "s/^.*release-R\([0-9][0-9]*\).*$/\1/p" | \
- sort -g | tail -1)
-canary="$(($beta + 1))"
+declare -A branch branch_number commit
+remote_repo=$(git -C "${tc_utils_dir}" remote)
+canary_ref="refs/heads/main"
+# Read the last two release-Rxx from remote branches
+# and assign them to stable_ref and beta_ref.
+# sort -V is the version sort which puts R100 after R99.
+last_branches=$(git -C "${tc_utils_dir}" ls-remote -h "${remote_repo}" \
+ release-R\* | cut -f2 | sort -V | tail -n 2)
+# We need `echo` to convert newlines into spaces for read.
+read stable_ref beta_ref <<< $(echo ${last_branches})
+# Branch names which start from release-R.
+branch["beta"]=${beta_ref##*/}
+branch["stable"]=${stable_ref##*/}
+branch["canary"]=${canary_ref##*/}
-json="{"
-sep=""
-for kver in $KVERS
+# Get current branch numbers (number which goes after R).
+branch_number["stable"]=$(echo "${branch["stable"]}" | \
+ sed -n -e "s/^release-R\([0-9][0-9]*\).*$/\1/p")
+branch_number["beta"]=$(echo "${branch["beta"]}" | \
+ sed -n -e "s/^release-R\([0-9][0-9]*\).*$/\1/p")
+branch_number["canary"]="$((branch_number[beta] + 1))"
+for skipped_branch in $SKIPPED_BRANCHES ; do
+ if [[ ${branch_number["canary"]} == $skipped_branch ]] ; then
+ ((branch_number[canary]++))
+ fi
+done
+
+# Without arguments the script updates all branches.
+channels=${1:-"all"}
+case "${channels}" in
+ stable | canary | beta )
+ ;;
+ main )
+ channels="canary"
+ ;;
+ all )
+ channels="canary beta stable"
+ ;;
+ --help | help | -h )
+ echo "$USAGE"
+ exit 0
+ ;;
+ * )
+ echo "Channel \"${channels}\" is not supported.
+Must be main (or canary), beta, stable or all." >&2
+ echo "$USAGE"
+ exit 1
+esac
+
+# Fetch latest branches.
+git -C "${tc_utils_dir}" fetch "${remote_repo}"
+
+worktree_dir=$(mktemp -d)
+echo "-> Working in ${worktree_dir}"
+# Create a worktree and make changes there.
+# This way we don't need to clean up and sync toolchain_utils before the
+# change, nor clean up after the submit.
+git -C "${tc_utils_dir}" worktree add --detach "${worktree_dir}"
+trap "git -C ${abs_tc_utils_dir} worktree remove ${worktree_dir}" EXIT
+cd "${worktree_dir}"
+
+for channel in ${channels}
do
- # Sort the gs output by timestamp (default ordering is by name, so
- # R86-13310.3-1594633089.gcov.xz goes after R86-13310.18-1595237847.gcov.xz)
- latest=$(gsutil.py ls -l "$GS_BASE/$kver/" | sort -k2 | \
- grep "R${canary}" | tail -1 || true)
- if [ -z "$latest" ]
- then
- # if no profiles exist for R${canary}, try the previous branch
+ errs=""
+ successes=0
+ curr_branch_number=${branch_number[${channel}]}
+ curr_branch=${branch[${channel}]}
+ echo
+ echo "Checking \"${channel}\" channel..."
+ echo "branch_number=${curr_branch_number} branch=${curr_branch}"
+ json="{"
+ sep=""
+ for kver in $KVERS
+ do
+ # Sort the gs output by timestamp (default ordering is by name, so
+ # R86-13310.3-1594633089.gcov.xz goes after R86-13310.18-1595237847.gcov.xz)
latest=$(gsutil.py ls -l "$GS_BASE/$kver/" | sort -k2 | \
- grep "R${beta}" | tail -1)
- fi
+ grep "R${curr_branch_number}" | tail -1 || true)
+ if [[ -z "$latest" && "${channel}" != "stable" ]]
+ then
+ # if no profiles exist for the current branch, try the previous branch
+ latest=$(gsutil.py ls -l "$GS_BASE/$kver/" | sort -k2 | \
+ grep "R$((curr_branch_number - 1))" | tail -1)
+ fi
- # Verify that the file has the expected date.
- file_time=$(echo "$latest" | awk '{print $2}')
- file_time_unix=$(date +%s -d "$file_time")
- if [ $file_time_unix -lt $expected_time ]
- then
- expected=$(env TZ=UTC date +%Y-%m-%dT%H:%M:%SZ -d @$expected_time)
- echo "Wrong date for $kver: $file_time is before $expected" >&2
- errs="$errs $kver"
- continue
- fi
+ # Verify that the file has the expected date.
+ file_time=$(echo "$latest" | awk '{print $2}')
+ file_time_unix=$(date +%s -d "$file_time")
+ if [ $file_time_unix -lt $expected_time ]
+ then
+ expected=$(env TZ=UTC date +%Y-%m-%dT%H:%M:%SZ -d @$expected_time)
+ echo "Wrong date for $kver: $file_time is before $expected" >&2
+ errs="$errs $kver"
+ continue
+ fi
- # Generate JSON.
- json_kver=$(echo "$kver" | tr . _)
- # b/147370213 (migrating profiles from gcov format) may result in the
- # pattern below no longer doing the right thing.
- name=$(echo "$latest" | sed 's%.*/\(.*\)\.gcov.*%\1%')
- json=$(cat <<EOT
+ # Generate JSON.
+ json_kver=$(echo "$kver" | tr . _)
+ # b/147370213 (migrating profiles from gcov format) may result in the
+ # pattern below no longer doing the right thing.
+ name=$(echo "$latest" | sed 's%.*/\(.*\)\.gcov.*%\1%')
+ json=$(cat <<EOT
$json$sep
"chromeos-kernel-$json_kver": {
"name": "$name"
}
EOT
-)
- sep=","
- successes=$((successes + 1))
+ )
+ sep=","
+ successes=$((successes + 1))
+ done
+
+ # If we did not succeed for any kvers, skip this channel.
+ if [[ $successes -eq 0 ]]
+ then
+ echo "error: AFDO profiles out of date for all kernel versions" >&2
+ failed_channels="${failed_channels} ${channel}"
+ continue
+ fi
+
+ git reset --hard HEAD
+ echo git checkout "${remote_repo}/${curr_branch}"
+ git checkout "${remote_repo}/${curr_branch}"
+
+ # Write new JSON file.
+ # Don't use `echo` since `json` might have escape characters in it.
+ printf "%s\n}\n" "$json" > "$outfile"
+
+ # If no changes were made, say so.
+ outdir=$(dirname "$outfile")
+ shortstat=$(cd "$outdir" && git status --short $(basename "$outfile"))
+ [ -z "$shortstat" ] && echo $(basename "$outfile")" is up to date." \
+ && continue
+
+ # If we had any errors, warn about them.
+ if [[ -n "$errs" ]]
+ then
+ echo "warning: failed to update $errs in ${channel}" >&2
+ failed_channels="${failed_channels} ${channel}"
+ continue
+ fi
+
+ git add afdo_metadata/kernel_afdo.json
+ case "${channel}" in
+ canary )
+ commit_contents="afdo_metadata: Publish the new kernel profiles
+
+Update chromeos-kernel-4_4
+Update chromeos-kernel-4_14
+Update chromeos-kernel-4_19
+Update chromeos-kernel-5_4
+
+BUG=None
+TEST=Verified in kernel-release-afdo-verify-orchestrator"
+ ;;
+ beta | stable )
+ commit_contents="afdo_metadata: Publish the new kernel profiles\
+ in ${curr_branch}
+
+Have PM pre-approval because this shouldn't break the release branch.
+
+BUG=None
+TEST=Verified in kernel-release-afdo-verify-orchestrator"
+ ;;
+ * )
+ echo "internal error: unhandled channel \"${channel}\"" >&2
+ exit 2
+ esac
+
+ git commit -v -e -m "${commit_contents}"
+
+ commit[${channel}]=$(git -C "${worktree_dir}" rev-parse HEAD)
done
-# If we did not succeed for any kvers, exit now.
-if [ $successes -eq 0 ]
+echo
+# Array size check doesn't play well with the unbound variable option.
+set +u
+if [[ ${#commit[@]} -gt 0 ]]
then
- echo "error: AFDO profiles out of date for all kernel versions" >&2
- exit 2
+ set -u
+ echo "The change is applied in ${!commit[@]}."
+ echo "Run these commands to submit the change:"
+ echo
+ for channel in ${!commit[@]}
+ do
+ echo -e "\tgit -C ${tc_utils_dir} push ${remote_repo} \
+${commit[${channel}]}:refs/for/${branch[${channel}]}"
+ done
+
+ # Report failed channels.
+ if [[ -n "${failed_channels}" ]]
+ then
+ echo
+ echo "error: failed to update kernel afdo in ${failed_channels}" >&2
+ exit 3
+ fi
+else
+ # No commits. Check if it is due to failures.
+ if [[ -z "${failed_channels}" ]]
+ then
+ echo "No changes are applied. It looks like AFDO versions are up to date."
+ else
+ echo "error: update in ${failed_channels} failed" >&2
+ exit 3
+ fi
fi
-
-# Write new JSON file.
-printf "%s\n}\n" "$json" > "$outfile"
-
-# Show the changes.
-(cd "$tc_utils_dir" && git diff)
-
-# If no changes were made, say so.
-outdir=$(dirname "$outfile")
-shortstat=$(cd "$outdir" && git status --short $(basename "$outfile"))
-[ -n "$shortstat" ] || echo $(basename "$outfile")" is up to date."
-
-# If we had any errors, warn about them.
-[ -z "$errs" ] || echo "warning: failed to update$errs" >&2
diff --git a/android_bench_suite/Binder_flags_aosp.diff b/android_bench_suite/Binder_flags_aosp.diff
deleted file mode 100644
index 1e3ec6c..0000000
--- a/android_bench_suite/Binder_flags_aosp.diff
+++ /dev/null
@@ -1,43 +0,0 @@
-diff --git a/libs/binder/Android.bp b/libs/binder/Android.bp
-index f7347aef1..a539fac47 100644
---- a/libs/binder/Android.bp
-+++ b/libs/binder/Android.bp
-@@ -52,10 +52,12 @@ cc_library {
- "-Wall",
- "-Wextra",
- "-Werror",
-+ CFLAGS_FOR_BENCH_SUITE
- ],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
- product_variables: {
- binder32bit: {
-- cflags: ["-DBINDER_IPC_32BIT=1"],
-+ cflags: ["-DBINDER_IPC_32BIT=1",],
- },
- },
-
-@@ -76,4 +78,22 @@ cc_library {
- },
- }
-
--subdirs = ["tests"]
-+cc_test {
-+ name: "binderThroughputTest",
-+ srcs: ["tests/binderThroughputTest.cpp"],
-+ shared_libs: [
-+ "libbinder",
-+ "libutils",
-+ ],
-+ clang: true,
-+ cflags: [
-+ "-g",
-+ "-Wall",
-+ "-Werror",
-+ "-Wno-missing-field-initializers",
-+ "-Wno-sign-compare",
-+ "-O3",
-+ CFLAGS_FOR_BENCH_SUITE
-+ ],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+}
-+
diff --git a/android_bench_suite/Binder_flags_internal.diff b/android_bench_suite/Binder_flags_internal.diff
deleted file mode 100644
index 1e3ec6c..0000000
--- a/android_bench_suite/Binder_flags_internal.diff
+++ /dev/null
@@ -1,43 +0,0 @@
-diff --git a/libs/binder/Android.bp b/libs/binder/Android.bp
-index f7347aef1..a539fac47 100644
---- a/libs/binder/Android.bp
-+++ b/libs/binder/Android.bp
-@@ -52,10 +52,12 @@ cc_library {
- "-Wall",
- "-Wextra",
- "-Werror",
-+ CFLAGS_FOR_BENCH_SUITE
- ],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
- product_variables: {
- binder32bit: {
-- cflags: ["-DBINDER_IPC_32BIT=1"],
-+ cflags: ["-DBINDER_IPC_32BIT=1",],
- },
- },
-
-@@ -76,4 +78,22 @@ cc_library {
- },
- }
-
--subdirs = ["tests"]
-+cc_test {
-+ name: "binderThroughputTest",
-+ srcs: ["tests/binderThroughputTest.cpp"],
-+ shared_libs: [
-+ "libbinder",
-+ "libutils",
-+ ],
-+ clang: true,
-+ cflags: [
-+ "-g",
-+ "-Wall",
-+ "-Werror",
-+ "-Wno-missing-field-initializers",
-+ "-Wno-sign-compare",
-+ "-O3",
-+ CFLAGS_FOR_BENCH_SUITE
-+ ],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+}
-+
diff --git a/android_bench_suite/Dex2oat_flags_aosp.diff b/android_bench_suite/Dex2oat_flags_aosp.diff
deleted file mode 100644
index fcd611f..0000000
--- a/android_bench_suite/Dex2oat_flags_aosp.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/compiler/Android.bp b/compiler/Android.bp
-index a1269dcaf..a9b62b474 100644
---- a/compiler/Android.bp
-+++ b/compiler/Android.bp
-@@ -215,6 +215,8 @@ art_cc_defaults {
- shared: {
- shared_libs: ["libcrypto"],
- },
-+ cflags: [CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
- }
-
- gensrcs {
diff --git a/android_bench_suite/Dex2oat_flags_internal.diff b/android_bench_suite/Dex2oat_flags_internal.diff
deleted file mode 100644
index fcd611f..0000000
--- a/android_bench_suite/Dex2oat_flags_internal.diff
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/compiler/Android.bp b/compiler/Android.bp
-index a1269dcaf..a9b62b474 100644
---- a/compiler/Android.bp
-+++ b/compiler/Android.bp
-@@ -215,6 +215,8 @@ art_cc_defaults {
- shared: {
- shared_libs: ["libcrypto"],
- },
-+ cflags: [CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
- }
-
- gensrcs {
diff --git a/android_bench_suite/Hwui_flags_aosp.diff b/android_bench_suite/Hwui_flags_aosp.diff
deleted file mode 100644
index 9e3b1df..0000000
--- a/android_bench_suite/Hwui_flags_aosp.diff
+++ /dev/null
@@ -1,50 +0,0 @@
-diff --git a/libs/hwui/Android.bp b/libs/hwui/Android.bp
-index 558cdc0faf3..1565be5b201 100644
---- a/libs/hwui/Android.bp
-+++ b/libs/hwui/Android.bp
-@@ -24,12 +24,15 @@ cc_defaults {
- "-Werror",
- "-fvisibility=hidden",
- "-DHWUI_NEW_OPS",
-+ CFLAGS_FOR_BENCH_SUITE
-
- // GCC false-positives on this warning, and since we -Werror that's
- // a problem
- "-Wno-free-nonheap-object",
- ],
-
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- include_dirs: [
- "external/skia/include/private",
- "external/skia/src/core",
-@@ -214,6 +217,9 @@ cc_defaults {
- export_proto_headers: true,
- },
-
-+ cflags: [CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- export_include_dirs: ["."],
- }
-
-@@ -229,7 +235,8 @@ cc_library {
- cc_library_static {
- name: "libhwui_static_null_gpu",
- defaults: ["libhwui_defaults"],
-- cflags: ["-DHWUI_NULL_GPU"],
-+ cflags: ["-DHWUI_NULL_GPU", CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
- srcs: [
- "debug/nullegl.cpp",
- "debug/nullgles.cpp",
-@@ -319,7 +326,8 @@ cc_benchmark {
- name: "hwuimicro",
- defaults: ["hwui_test_defaults"],
-
-- cflags: ["-DHWUI_NULL_GPU"],
-+ cflags: ["-DHWUI_NULL_GPU", CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-
- whole_static_libs: ["libhwui_static_null_gpu"],
-
diff --git a/android_bench_suite/Hwui_flags_internal.diff b/android_bench_suite/Hwui_flags_internal.diff
deleted file mode 100644
index 16a0222..0000000
--- a/android_bench_suite/Hwui_flags_internal.diff
+++ /dev/null
@@ -1,72 +0,0 @@
-diff --git a/libs/hwui/Android.bp b/libs/hwui/Android.bp
-index 303d05f084a..946aa9bb754 100644
---- a/libs/hwui/Android.bp
-+++ b/libs/hwui/Android.bp
-@@ -17,6 +17,7 @@ cc_defaults {
- "-Wunreachable-code",
- "-Werror",
- "-fvisibility=hidden",
-+ CFLAGS_FOR_BENCH_SUITE
-
- // GCC false-positives on this warning, and since we -Werror that's
- // a problem
-@@ -30,6 +31,8 @@ cc_defaults {
- //"-DANDROID_ENABLE_LINEAR_BLENDING",
- ],
-
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- include_dirs: [
- "external/skia/include/private",
- "external/skia/src/core",
-@@ -231,6 +234,9 @@ cc_defaults {
- export_proto_headers: true,
- },
-
-+ cflags: [CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- export_include_dirs: ["."],
- export_shared_lib_headers: ["libRScpp"],
- }
-@@ -240,7 +246,7 @@ cc_library {
- defaults: [
- "libhwui_defaults",
-
-- // Enables fine-grained GLES error checking
-+ // Enables fine-grained GLES error checking
- // If enabled, every GLES call is wrapped & error checked
- // Has moderate overhead
- "hwui_enable_opengl_validation",
-@@ -257,7 +263,8 @@ cc_library_static {
- "libhwui_defaults",
- "hwui_debug",
- ],
-- cflags: ["-DHWUI_NULL_GPU"],
-+ cflags: ["-DHWUI_NULL_GPU", CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
- srcs: [
- "debug/nullegl.cpp",
- ],
-@@ -354,6 +361,9 @@ cc_benchmark {
- whole_static_libs: ["libhwui"],
- shared_libs: ["libmemunreachable"],
-
-+ cflags: [CFLAGS_FOR_BENCH_SUITE],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- srcs: [
- "tests/macrobench/TestSceneRunner.cpp",
- "tests/macrobench/main.cpp",
-@@ -371,8 +381,11 @@ cc_benchmark {
- cflags: [
- "-include debug/wrap_gles.h",
- "-DHWUI_NULL_GPU",
-+ CFLAGS_FOR_BENCH_SUITE
- ],
-
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- whole_static_libs: ["libhwui_static_debug"],
- shared_libs: ["libmemunreachable"],
-
diff --git a/android_bench_suite/README.md b/android_bench_suite/README.md
deleted file mode 100644
index b539c7c..0000000
--- a/android_bench_suite/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# `android_bench_suite`
-
-This is a Android Toolchain benchmark suite.
-
-Where to find this suite:
-
-This suite locates at google3, please create a google3 branch first, then run:
-
-```
-$ cd experimental/users/zhizhouy/benchtoolchain
-```
-
-Copy this directory to the place you want to put it.
-
-To use this suite:
-1. Configure the basic envrionment in `env_setting` file.
-
-2. Run `./apply_patches.py`, which will:
-
- 1. Patch all the android benchmarks in the android tree.
- Benchmark Panorama does not exist in android tree, so perftests/
- gets copied into the top-level of android tree.
-
- 2. Apply patch autotest.diff to `android_root/external/autotest`, which
- includes all the test scripts for benchmarks. Copy testcases to
- related autotest directory.
-
- If you have applied the patch partially and hope to discard the
- patch, just run `discard_patches.py`
-
-3. Build and run benchmark on the device using ./run.py. You can either
- use test configuration file (-t `test_config`), or set all the variables
- manually.
-
-4. The raw results locate at `bench_result_*` in bench suite home
- directory.
-
-5. The JSON format result will be generated for crosperf report.
-
-Utility tools:
-
-1. Autotest is a test framework located in android exteranl/autotest
- Before first time running it, please run
- `utils/build_externals.py` first to ensure all the environments
- and tools needed are installed.
-
-2. Crosperf is a report generating tool in ChromeOS toolchain utilities.
- Please look for it in chromium source:
- `src/third_party/toolchain-utils/crosperf`.
diff --git a/android_bench_suite/Skia_flags_aosp.diff b/android_bench_suite/Skia_flags_aosp.diff
deleted file mode 100644
index b2ff242..0000000
--- a/android_bench_suite/Skia_flags_aosp.diff
+++ /dev/null
@@ -1,28 +0,0 @@
-diff --git a/Android.bp b/Android.bp
-index a581b0a53..36159c5ae 100644
---- a/Android.bp
-+++ b/Android.bp
-@@ -40,8 +40,11 @@ cc_library {
- "-Wno-clobbered",
- "-Wno-error",
- "-fexceptions",
-+ CFLAGS_FOR_BENCH_SUITE
- ],
-
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- cppflags: [
- "-std=c++11",
- "-fno-threadsafe-statics",
-@@ -1470,8 +1473,11 @@ cc_test {
- "-U_FORTIFY_SOURCE",
- "-D_FORTIFY_SOURCE=1",
- "-DSKIA_IMPLEMENTATION=1",
-+ CFLAGS_FOR_BENCH_SUITE
- ],
-
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- cppflags: [
- "-std=c++11",
- "-fno-threadsafe-statics",
diff --git a/android_bench_suite/Skia_flags_internal.diff b/android_bench_suite/Skia_flags_internal.diff
deleted file mode 100644
index 2eb6a1d..0000000
--- a/android_bench_suite/Skia_flags_internal.diff
+++ /dev/null
@@ -1,26 +0,0 @@
-diff --git a/Android.bp b/Android.bp
-index b4e1f5f701..13e1c6645f 100644
---- a/Android.bp
-+++ b/Android.bp
-@@ -9,7 +9,9 @@ cc_library {
- "-D_FORTIFY_SOURCE=1",
- "-DSKIA_IMPLEMENTATION=1",
- "-DATRACE_TAG=ATRACE_TAG_VIEW",
-+ CFLAGS_FOR_BENCH_SUITE
- ],
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-
- export_include_dirs: [
- "include/android/",
-@@ -1603,8 +1605,11 @@ cc_test {
-
- cflags: [
- "-Wno-unused-parameter",
-+ CFLAGS_FOR_BENCH_SUITE
- ],
-
-+ ldflags: [LDFLAGS_FOR_BENCH_SUITE],
-+
- local_include_dirs: [
- "bench/",
- "experimental/svg/model/",
diff --git a/android_bench_suite/apply_patches.py b/android_bench_suite/apply_patches.py
deleted file mode 100755
index fbe1a8d..0000000
--- a/android_bench_suite/apply_patches.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python2
-#
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Script to patch Android repo with diffs that are needed by the suite.
-
-Run this script before running the suite.
-"""
-from __future__ import print_function
-
-import config
-import os
-import subprocess
-
-# The patches to be added to the android repo.
-# An error may occur if it is already patched, or meets some error.
-# FIXME: Needs to be FIXED in the future.
-def try_patch_skia():
- skia_dir = os.path.join(config.android_home, config.bench_dict['Skia'])
- # You may want to change the file based on aosp or internal
- if config.android_type == 'internal':
- print('No need to patch skia for internal repo.')
- return
- elif config.android_type == 'aosp':
- skia_patch = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), 'skia_aosp.diff')
- else:
- raise ValueError('Adnroid source type should be either aosp or internal.')
- # FIXME: A quick hack, need to handle errors and check whether has been
- # applied in the future.
- try:
- subprocess.check_call(['git', '-C', skia_dir, 'apply', skia_patch])
- print('Skia patched successfully!')
- except subprocess.CalledProcessError:
- print('Skia patch not applied, error or already patched.')
-
-
-def try_patch_autotest():
- # Patch autotest, which includes all the testcases on device, setting device,
- # and running the benchmarks
- autotest_dir = os.path.join(config.android_home, config.autotest_dir)
- autotest_patch = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), 'autotest.diff')
- dex2oat_dir = os.path.join(autotest_dir, 'server/site_tests/android_Dex2oat')
- panorama_dir = os.path.join(autotest_dir,
- 'server/site_tests/android_Panorama')
- # FIXME: A quick hack, need to handle errors and check whether has been
- # applied in the future.
- try:
- subprocess.check_call(['git', '-C', autotest_dir, 'apply', autotest_patch])
- subprocess.check_call(['cp', '-rf', 'dex2oat_input', dex2oat_dir])
- subprocess.check_call(['cp', '-rf', 'panorama_input', panorama_dir])
- print('Autotest patched successfully!')
- except subprocess.CalledProcessError:
- print('Autotest patch not applied, error or already patched.')
-
-
-def try_patch_panorama():
- panorama_dir = os.path.join(config.android_home,
- config.bench_dict['Panorama'])
- panorama_patch = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), 'panorama.diff')
- # FIXME: A quick hack, need to handle errors and check whether has been
- # applied in the future.
- try:
- subprocess.check_call(['git', '-C', panorama_dir, 'apply', panorama_patch])
- print('Panorama patched successfully!')
- except subprocess.CalledProcessError:
- print('Panorama patch not applied, error or already patched.')
-
-
-def try_patch_synthmark():
- synthmark_dir = 'devrel/tools/synthmark'
- # FIXME: A quick hack, need to handle errors and check whether has been
- # applied in the future.
- try:
- subprocess.check_call([
- 'bash', '-c', 'mkdir devrel && '
- 'cd devrel && '
- 'repo init -u sso://devrel/manifest && '
- 'repo sync tools/synthmark'
- ])
- synthmark_patch = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), 'synthmark.diff')
- subprocess.check_call(['git', '-C', synthmark_dir,
- 'apply', synthmark_patch])
-
- subprocess.check_call(['mv', '-f', synthmark_dir, config.android_home])
- subprocess.check_call(['rm', '-rf', 'devrel'])
- print('Synthmark patched successfully!')
- except subprocess.CalledProcessError:
- print('Synthmark patch not applied, error or already patched.')
-
-
-def main():
- try_patch_skia()
- try_patch_autotest()
- try_patch_panorama()
- try_patch_synthmark()
-
-
-if __name__ == '__main__':
- main()
diff --git a/android_bench_suite/autotest.diff b/android_bench_suite/autotest.diff
deleted file mode 100644
index c2fed83..0000000
--- a/android_bench_suite/autotest.diff
+++ /dev/null
@@ -1,1057 +0,0 @@
-diff --git a/server/site_tests/android_Binder/android_Binder.py b/server/site_tests/android_Binder/android_Binder.py
-new file mode 100644
-index 000000000..b233b586a
---- /dev/null
-+++ b/server/site_tests/android_Binder/android_Binder.py
-@@ -0,0 +1,57 @@
-+# Tests for android Binder
-+from __future__ import print_function
-+
-+import bench_config
-+import logging
-+import os
-+import re
-+
-+from autotest_lib.server import test
-+
-+class android_Binder(test.test):
-+ version = 1
-+
-+ def run_once(self, host=None):
-+ self.client = host
-+
-+ out_dir = os.path.join(bench_config.android_home,
-+ 'out/target/product/' + bench_config.product)
-+
-+ # Set binary directories
-+ lib_dir = os.path.join(out_dir, 'system/lib/libbinder.so')
-+ lib_dir_DUT = '/system/lib/libbinder.so'
-+ lib64_dir = os.path.join(out_dir, 'system/lib64/libbinder.so')
-+ lib64_dir_DUT = '/system/lib64/libbinder.so'
-+ bench_dir = os.path.join(out_dir,
-+ 'symbols/data/nativetest64',
-+ 'binderThroughputTest/binderThroughputTest')
-+ bench_dir_DUT = os.path.join('/data/local/tmp',
-+ 'binderThroughputTest')
-+
-+ # Push binary to the device
-+ print('Pushing binaries of Binder benchmark onto device!')
-+ host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
-+ host.send_file(lib_dir, lib_dir_DUT, delete_dest=True)
-+ host.send_file(lib64_dir, lib64_dir_DUT, delete_dest=True)
-+
-+ # Make sure the binary is executable
-+ self.client.run('chmod u+x ' + bench_dir_DUT)
-+
-+ print('Running tests on the device...')
-+ # First run creates bench_result
-+ self.client.run('taskset %s /data/local/tmp/'
-+ 'binderThroughputTest > /data/local/tmp/bench_result'
-+ % os.getenv('TEST_MODE'))
-+ # Next 4 runs add to bench_result
-+ for i in range(4):
-+ self.client.run('taskset %s /data/local/tmp/'
-+ 'binderThroughputTest >> '
-+ '/data/local/tmp/bench_result'
-+ % os.getenv('TEST_MODE'))
-+
-+ # Pull result from the device
-+ out_dir = bench_config.bench_suite_dir
-+ result_dir_DUT = '/data/local/tmp/bench_result'
-+
-+ host.get_file(result_dir_DUT, out_dir, delete_dest=True)
-+ print('Result has been pulled back to file bench_result!')
-diff --git a/server/site_tests/android_Binder/bench_config.py b/server/site_tests/android_Binder/bench_config.py
-new file mode 100644
-index 000000000..20f685eb9
---- /dev/null
-+++ b/server/site_tests/android_Binder/bench_config.py
-@@ -0,0 +1,19 @@
-+#!/bin/bash/python
-+import os
-+
-+home = os.environ["HOME"]
-+
-+android_home = os.getenv("ANDROID_HOME",
-+ default=os.path.join(home,
-+ 'android_source/master-googleplex/'))
-+bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
-+ default=os.path.join(android_home,
-+ 'benchtoolchain'))
-+
-+synthmark_dir = 'framework/native/libs/binder'
-+
-+real_synthmark_dir = os.path.join(android_home, synthmark_dir)
-+
-+out_dir = os.path.join(android_home, 'out')
-+
-+product = os.getenv("PRODUCT", default="generic")
-diff --git a/server/site_tests/android_Binder/control b/server/site_tests/android_Binder/control
-new file mode 100644
-index 000000000..d91854b11
---- /dev/null
-+++ b/server/site_tests/android_Binder/control
-@@ -0,0 +1,19 @@
-+#Control
-+
-+NAME = "Binder"
-+AUTHOR = "Zhizhou Yang"
-+ATTRIBUTES = "suite:android_toolchain_benchmark"
-+TIME = "MEDIUM"
-+TEST_CATEGORY = "Functional"
-+TEST_CLASS = "application"
-+TEST_TYPE = "server"
-+
-+DOC = """
-+
-+"""
-+
-+def run_binder_test(machine):
-+ host = hosts.create_host(machine)
-+ job.run_test("android_Binder", host=host)
-+
-+parallel_simple(run_binder_test, machines)
-diff --git a/server/site_tests/android_Dex2oat/android_Dex2oat.py b/server/site_tests/android_Dex2oat/android_Dex2oat.py
-new file mode 100644
-index 000000000..dd6af0b53
---- /dev/null
-+++ b/server/site_tests/android_Dex2oat/android_Dex2oat.py
-@@ -0,0 +1,70 @@
-+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-+# Use of this source code is governed by a BSD-style license that can be
-+# found in the LICENSE file.
-+
-+import bench_config
-+import time
-+import logging
-+import os
-+import re
-+
-+from autotest_lib.client.common_lib import error
-+from autotest_lib.server import test
-+
-+class android_Dex2oat(test.test):
-+ version = 1
-+
-+ def run_once(self, host=None):
-+ self.client = host
-+
-+ out_dir = os.path.join(bench_config.android_home,
-+ 'out/target/product/',
-+ bench_config.product)
-+
-+ # Set binary directories
-+ bench_dir = os.path.join(out_dir, 'system/lib/libart-compiler.so')
-+ bench_dir_DUT = '/system/lib/libart-compiler.so'
-+ bench64_dir = os.path.join(out_dir, 'system/lib64/libart-compiler.so')
-+ bench64_dir_DUT = '/system/lib64/libart-compiler.so'
-+
-+ # Push libart-compiler.so to the device
-+ print('Pushing binaries of newly generated library onto device!')
-+ host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
-+ host.send_file(bench64_dir, bench64_dir_DUT, delete_dest=True)
-+
-+ # Set testcase directories
-+ test_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
-+ 'dex2oat_input')
-+ test_dir_DUT = '/data/local/tmp/'
-+
-+ # Push testcases to the device
-+ print('Pushing tests onto device!')
-+ host.send_file(test_dir, test_dir_DUT, delete_dest=True)
-+
-+ # Open file to write the result
-+ with open(os.path.join(bench_config.bench_suite_dir,
-+ 'bench_result'), 'w') as f:
-+
-+ # There are two benchmarks, chrome and camera.
-+ for i in range(2):
-+ f.write('Test %d:\n' % i)
-+ total_time = 0
-+ # Run benchmark for several times for accurancy
-+ for j in range(3):
-+ f.write('Iteration %d: ' % j)
-+ result = self.client.run('time taskset %s dex2oat'
-+ ' --dex-file=data/local/tmp/dex2oat_input/test%d.apk'
-+ ' --oat-file=data/local/tmp/dex2oat_input/test%d.oat'
-+ % (os.getenv('TEST_MODE'), i+1, i+1))
-+ # Find and record real time of the run
-+ time_str = ''
-+ for t in result.stdout.split() + result.stderr.split():
-+ if 'm' in t and 's' in t:
-+ time_str = t.split('m')
-+ break
-+ time_sec = float(time_str[0]) * 60
-+ time_sec += float(time_str[1].split('s')[0])
-+ f.write('User Time: %.2f seconds\n' % time_sec)
-+ total_time += time_sec
-+
-+ f.write('Total elapsed time: %.2f seconds.\n\n' % total_time)
-diff --git a/server/site_tests/android_Dex2oat/bench_config.py b/server/site_tests/android_Dex2oat/bench_config.py
-new file mode 100644
-index 000000000..d2855f22c
---- /dev/null
-+++ b/server/site_tests/android_Dex2oat/bench_config.py
-@@ -0,0 +1,15 @@
-+#!/bin/bash/python
-+import os
-+
-+home = os.environ["HOME"]
-+
-+android_home = os.getenv("ANDROID_HOME",
-+ default=os.path.join(home,
-+ 'android_source/master-googleplex/'))
-+bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
-+ default=os.path.join(android_home,
-+ 'benchtoolchain'))
-+
-+out_dir = os.path.join(android_home, 'out')
-+
-+product = os.getenv("PRODUCT", default="generic")
-diff --git a/server/site_tests/android_Dex2oat/control b/server/site_tests/android_Dex2oat/control
-new file mode 100644
-index 000000000..763864f3a
---- /dev/null
-+++ b/server/site_tests/android_Dex2oat/control
-@@ -0,0 +1,21 @@
-+# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
-+# Use of this source code is governed by a BSD-style license that can be
-+# found in the LICENSE file.
-+
-+NAME = "Dex2oat"
-+AUTHOR = "Zhizhou Yang"
-+ATTRIBUTES = "suite:android_toolchain_benchmark"
-+TIME = "SHORT"
-+TEST_CATEGORY = "Functional"
-+TEST_CLASS = "kernel"
-+TEST_TYPE = "server"
-+
-+DOC = """
-+
-+"""
-+
-+def run_dex2oat(machine):
-+ host = hosts.create_host(machine)
-+ job.run_test("android_Dex2oat", host=host)
-+
-+parallel_simple(run_dex2oat, machines)
-diff --git a/server/site_tests/android_Hwui/android_Hwui.py b/server/site_tests/android_Hwui/android_Hwui.py
-new file mode 100644
-index 000000000..d1837e042
---- /dev/null
-+++ b/server/site_tests/android_Hwui/android_Hwui.py
-@@ -0,0 +1,67 @@
-+# Tests for android Hwui
-+from __future__ import print_function
-+
-+import bench_config
-+import logging
-+import os
-+import re
-+
-+from autotest_lib.server import test
-+
-+class android_Hwui(test.test):
-+ version = 1
-+
-+ def run_once(self, host=None):
-+ self.client = host
-+
-+ out_dir = os.path.join(bench_config.android_home,
-+ 'out/target/product/' + bench_config.product)
-+
-+ lib_dir = os.path.join(out_dir, 'system/lib/libhwui.so')
-+ lib_dir_DUT = '/system/lib/libhwui.so'
-+ lib64_dir = os.path.join(out_dir, 'system/lib64/libhwui.so')
-+ lib64_dir_DUT = '/system/lib64/libhwui.so'
-+ bench_dir = os.path.join(out_dir,
-+ 'symbols/data/nativetest64/',
-+ 'hwuimicro/hwuimicro')
-+ bench_dir_DUT = '/data/local/tmp/hwuimicro'
-+
-+ # Push binary to the device
-+ print('Pushing Hwui benchmark onto device!')
-+ host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
-+ host.send_file(lib_dir, lib_dir_DUT, delete_dest=True)
-+ host.send_file(lib64_dir, lib64_dir_DUT, delete_dest=True)
-+
-+ # Make sure the binary is executable
-+ self.client.run('chmod u+x ' + bench_dir_DUT)
-+
-+
-+ print('Running tests on the device...')
-+ self.client.run('taskset %s /data/local/tmp/hwuimicro'
-+ ' > /data/local/tmp/bench_result'
-+ % os.getenv('TEST_MODE'))
-+
-+ # Pull result from the device
-+ out_dir = bench_config.bench_suite_dir
-+ result_dir_DUT = '/data/local/tmp/bench_result'
-+
-+ host.get_file(result_dir_DUT, out_dir, delete_dest=True)
-+
-+ # Update total time of the test
-+ t = 0
-+ with open(os.path.join(out_dir, 'bench_result'), 'r') as fin:
-+
-+ for lines in fin:
-+ line = lines.split()
-+ print(line)
-+
-+ # Check if there is test result in this line
-+ if len(line) == 8:
-+ # Accumulate the Run time for the testcase
-+ t += int(line[2])
-+
-+ # Append total time to the file
-+ with open(os.path.join(out_dir, 'bench_result'), 'a') as fout:
-+ fout.write('\nTotal elapsed time: %d ns.\n' % t)
-+
-+ print('Result has been pulled back to file bench_result!')
-diff --git a/server/site_tests/android_Hwui/bench_config.py b/server/site_tests/android_Hwui/bench_config.py
-new file mode 100644
-index 000000000..a98d259f9
---- /dev/null
-+++ b/server/site_tests/android_Hwui/bench_config.py
-@@ -0,0 +1,19 @@
-+#!/bin/bash/python
-+import os
-+
-+home = os.environ["HOME"]
-+
-+android_home = os.getenv("ANDROID_HOME",
-+ default=os.path.join(home,
-+ 'android_source/master-googleplex/'))
-+bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
-+ default=os.path.join(android_home,
-+ 'benchtoolchain'))
-+
-+hwui_dir = 'frameworks/base/libs/hwui/'
-+
-+real_hwui_dir = os.path.join(android_home, hwui_dir)
-+
-+out_dir = os.path.join(android_home, 'out')
-+
-+product = os.getenv("PRODUCT", default="generic")
-diff --git a/server/site_tests/android_Hwui/control b/server/site_tests/android_Hwui/control
-new file mode 100644
-index 000000000..89c47da20
---- /dev/null
-+++ b/server/site_tests/android_Hwui/control
-@@ -0,0 +1,19 @@
-+#Control
-+
-+NAME = "Hwui"
-+AUTHOR = "Zhizhou Yang"
-+ATTRIBUTES = "suite:android_toolchain_benchmark"
-+TIME = "MEDIUM"
-+TEST_CATEGORY = "Functional"
-+TEST_CLASS = "library"
-+TEST_TYPE = "server"
-+
-+DOC = """
-+
-+"""
-+
-+def run_hwui_test(machine):
-+ host = hosts.create_host(machine)
-+ job.run_test("android_Hwui", host=host)
-+
-+parallel_simple(run_hwui_test, machines)
-diff --git a/server/site_tests/android_Panorama/android_Panorama.py b/server/site_tests/android_Panorama/android_Panorama.py
-new file mode 100644
-index 000000000..db2a29cde
---- /dev/null
-+++ b/server/site_tests/android_Panorama/android_Panorama.py
-@@ -0,0 +1,53 @@
-+# Tests for android Panorama
-+from __future__ import print_function
-+
-+import bench_config
-+import logging
-+import os
-+import re
-+
-+from autotest_lib.server import test
-+
-+class android_Panorama(test.test):
-+ version = 1
-+
-+ def run_once(self, host=None):
-+ self.client = host
-+
-+ out_dir = os.path.join(bench_config.android_home,
-+ 'out/target/product/' + bench_config.product)
-+
-+ # Set binary directories
-+ bench_dir = os.path.join(out_dir,
-+ 'data/local/tmp/panorama_bench64')
-+ bench_dir_DUT = '/data/local/tmp/panorama_bench64'
-+
-+ # Set tests directories
-+ tests_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
-+ 'panorama_input')
-+ tests_dir_DUT = '/data/local/tmp/panorama_input/'
-+
-+ # Push binary to the device
-+ print('Pushing binaries of Panorama benchmark onto device!')
-+ host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
-+
-+ # Make sure the binary is executable
-+ self.client.run('chmod u+x ' + bench_dir_DUT)
-+
-+ # Push testcases to the device
-+ print('Pushing tests onto device!')
-+ host.send_file(tests_dir, tests_dir_DUT, delete_dest=True)
-+
-+ print('Running tests on the device...')
-+ self.client.run('taskset %s /data/local/tmp/panorama_bench64 '
-+ '/data/local/tmp/panorama_input/panorama_input/test '
-+ '/data/local/tmp/panorama.ppm'
-+ ' > /data/local/tmp/bench_result'
-+ % os.getenv('TEST_MODE'))
-+
-+ # Pull result from the device
-+ out_dir = bench_config.bench_suite_dir
-+ result_dir_DUT = '/data/local/tmp/bench_result'
-+
-+ host.get_file(result_dir_DUT, out_dir, delete_dest=True)
-+ print('Result has been pulled back to file bench_result!')
-diff --git a/server/site_tests/android_Panorama/bench_config.py b/server/site_tests/android_Panorama/bench_config.py
-new file mode 100644
-index 000000000..075beec76
---- /dev/null
-+++ b/server/site_tests/android_Panorama/bench_config.py
-@@ -0,0 +1,19 @@
-+#!/bin/bash/python
-+import os
-+
-+home = os.environ["HOME"]
-+
-+android_home = os.getenv("ANDROID_HOME",
-+ default=os.path.join(home,
-+ 'android_source/master-googleplex/'))
-+bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
-+ default=os.path.join(android_home,
-+ 'benchtoolchain'))
-+
-+panorama_dir = 'perftests/panorama/'
-+
-+real_panorama_dir = os.path.join(android_home, panorama_dir)
-+
-+out_dir = os.path.join(android_home, 'out')
-+
-+product = os.getenv("PRODUCT", default="generic")
-diff --git a/server/site_tests/android_Panorama/control b/server/site_tests/android_Panorama/control
-new file mode 100644
-index 000000000..3cd589eed
---- /dev/null
-+++ b/server/site_tests/android_Panorama/control
-@@ -0,0 +1,19 @@
-+#Control
-+
-+NAME = "Panorama"
-+AUTHOR = "Zhizhou Yang"
-+ATTRIBUTES = "suite:android_toolchain_benchmark"
-+TIME = "MEDIUM"
-+TEST_CATEGORY = "Functional"
-+TEST_CLASS = "application"
-+TEST_TYPE = "server"
-+
-+DOC = """
-+
-+"""
-+
-+def run_panorama_test(machine):
-+ host = hosts.create_host(machine)
-+ job.run_test("android_Panorama", host=host)
-+
-+parallel_simple(run_panorama_test, machines)
-diff --git a/server/site_tests/android_SetDevice/android_SetDevice.py b/server/site_tests/android_SetDevice/android_SetDevice.py
-new file mode 100644
-index 000000000..7a7134d58
---- /dev/null
-+++ b/server/site_tests/android_SetDevice/android_SetDevice.py
-@@ -0,0 +1,77 @@
-+# Set device modes such as cpu frequency
-+from __future__ import print_function
-+
-+import logging
-+import os
-+import re
-+import time
-+
-+from autotest_lib.server import test
-+
-+def _get_cat_value(result):
-+ return result.stdout.split('\n')[0]
-+
-+class android_SetDevice(test.test):
-+ version = 1
-+
-+ def run_once(self, host=None):
-+ self.client = host
-+
-+ # Disable GPU
-+ self.client.run('setprop debug.rs.default-GPU-driver 1')
-+
-+ # Freeze system
-+ # Stop perfd, mpdecision and thermal-engine to ensure setting runs
-+ # without unexpected errors.
-+ self.client.run('stop thermal-engine')
-+ self.client.run('stop mpdecision')
-+ self.client.run('stop perfd')
-+
-+ # Set airplane mode on the device
-+ self.client.run('settings put global airplane_mode_on 1')
-+
-+ print('Setting frequency on the device...')
-+ frequency = os.getenv('FREQUENCY')
-+
-+ # Get number of cores on device
-+ result = self.client.run('ls /sys/devices/system/cpu/ '
-+ '| grep cpu[0-9].*')
-+ cores = result.stdout.splitlines()
-+ for core in cores:
-+ if core.startswith('cpu'):
-+ # First set all cores online
-+ online = os.path.join('/sys/devices/system/cpu', core, 'online')
-+ online_status = _get_cat_value(self.client.run('cat %s' % online))
-+ if online_status == '0':
-+ self.client.run('echo %s > %s' % ('1', online))
-+
-+ freq_path = os.path.join('/sys/devices/system/cpu', core,
-+ 'cpufreq')
-+
-+ # Check if the frequency user entered is legal or not.
-+ available_freq = self.client.run('cat %s/'
-+ 'scaling_available_frequencies'
-+ % (freq_path))
-+ available_freq_list = _get_cat_value(available_freq).split()
-+
-+ if frequency not in available_freq_list:
-+ raise ValueError('Wrong freqeuncy input, '
-+ 'please select from: \n%s'
-+ % (' '.join(available_freq_list)))
-+
-+ # Set frequency
-+ self.client.run('echo %s > %s/scaling_min_freq'
-+ % (frequency, freq_path))
-+ self.client.run('echo %s > %s/scaling_max_freq'
-+ % (frequency, freq_path))
-+
-+ # Sleep for 2 seconds, let device update the frequency.
-+ time.sleep(2)
-+
-+ # Get current frequency
-+ freq = self.client.run('cat %s/cpuinfo_cur_freq' % freq_path)
-+ f = _get_cat_value(freq)
-+ if f != frequency:
-+ raise RuntimeError('Expected frequency for %s to be %s, '
-+ 'but is %s' % (core, frequency, f))
-+ print('CPU frequency has been set to %s' % (frequency))
-diff --git a/server/site_tests/android_SetDevice/control b/server/site_tests/android_SetDevice/control
-new file mode 100644
-index 000000000..85163706d
---- /dev/null
-+++ b/server/site_tests/android_SetDevice/control
-@@ -0,0 +1,19 @@
-+# Control
-+
-+NAME = "SetDevice"
-+AUTHOR = "Zhizhou Yang"
-+ATTRIBUTES = "suite:android_toolchain_benchmark"
-+TIME = "MEDIUM"
-+TEST_CATEGORY = "Functional"
-+TEST_CLASS = "application"
-+TEST_TYPE = "server"
-+
-+DOC = """
-+Set the core frequency and which core online for devices.
-+"""
-+
-+def run_set_device_test(machine):
-+ host = hosts.create_host(machine)
-+ job.run_test("android_SetDevice", host=host)
-+
-+parallel_simple(run_set_device_test, machines)
-diff --git a/server/site_tests/android_Skia/android_Skia.py b/server/site_tests/android_Skia/android_Skia.py
-new file mode 100644
-index 000000000..fc8d09dab
---- /dev/null
-+++ b/server/site_tests/android_Skia/android_Skia.py
-@@ -0,0 +1,65 @@
-+# Tests for android Skia
-+from __future__ import print_function
-+
-+import bench_config
-+import logging
-+import os
-+import re
-+
-+from autotest_lib.server import test
-+
-+class android_Skia(test.test):
-+ version = 1
-+
-+ def run_once(self, host=None):
-+ self.client = host
-+
-+ out_dir = os.path.join(bench_config.android_home,
-+ 'out/target/product/' + bench_config.product)
-+
-+ # Set binary directories
-+ lib_dir = os.path.join(out_dir, 'system/lib/libskia.so')
-+ lib_dir_DUT = '/system/lib/libskia.so'
-+ lib64_dir = os.path.join(out_dir, 'system/lib64/libskia.so')
-+ lib64_dir_DUT = '/system/lib64/libskia.so'
-+ bench_dir = os.path.join(out_dir,
-+ 'data/nativetest64/',
-+ 'skia_nanobench/skia_nanobench')
-+ bench_dir_DUT = '/data/local/tmp/skia_nanobench'
-+
-+ # Push binary to the device
-+ print('Pushing Skia benchmark onto device!')
-+ host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
-+ host.send_file(lib_dir, lib_dir_DUT, delete_dest=True)
-+ host.send_file(lib64_dir, lib64_dir_DUT, delete_dest=True)
-+
-+ # Make sure the binary is executable
-+ self.client.run('chmod u+x ' + bench_dir_DUT)
-+
-+ # Set resource directory
-+ resource_dir = os.path.join(bench_config.real_skia_dir, 'resources')
-+ resource_dir_DUT = '/data/local/tmp/skia_resources/'
-+
-+ # Push binary to the device
-+ print('Pushing Skia resources onto device!')
-+ host.send_file(resource_dir, resource_dir_DUT, delete_dest=True)
-+
-+ # Run tests
-+ print('Running tests on the device...')
-+ try:
-+ self.client.run('taskset %s ./data/local/tmp/skia_nanobench'
-+ ' --outResultsFile /data/local/tmp/bench_result'
-+ ' --samples 25'
-+ ' --config nonrendering'
-+ % os.getenv('TEST_MODE'))
-+ except:
-+ # Ignore Abort caused failure
-+ None
-+
-+ # Pull result from the device
-+ out_dir = bench_config.bench_suite_dir
-+ result_dir_DUT = '/data/local/tmp/bench_result'
-+
-+ host.get_file(result_dir_DUT, out_dir, delete_dest=True)
-+
-+ print('Result has been pulled back to file bench_result!')
-diff --git a/server/site_tests/android_Skia/bench_config.py b/server/site_tests/android_Skia/bench_config.py
-new file mode 100644
-index 000000000..5d38d452f
---- /dev/null
-+++ b/server/site_tests/android_Skia/bench_config.py
-@@ -0,0 +1,19 @@
-+#!/bin/bash/python
-+import os
-+
-+home = os.environ["HOME"]
-+
-+android_home = os.getenv("ANDROID_HOME",
-+ default=os.path.join(home,
-+ 'android_source/master-googleplex/'))
-+bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
-+ default=os.path.join(android_home,
-+ 'benchtoolchain'))
-+
-+skia_dir = 'external/skia'
-+
-+real_skia_dir = os.path.join(android_home, skia_dir)
-+
-+out_dir = os.path.join(android_home, 'out')
-+
-+product = os.getenv("PRODUCT", default="generic")
-diff --git a/server/site_tests/android_Skia/control b/server/site_tests/android_Skia/control
-new file mode 100644
-index 000000000..e38195a8c
---- /dev/null
-+++ b/server/site_tests/android_Skia/control
-@@ -0,0 +1,19 @@
-+#Control
-+
-+NAME = "Skia"
-+AUTHOR = "Zhizhou Yang"
-+ATTRIBUTES = "suite:android_toolchain_benchmark"
-+TIME = "MEDIUM"
-+TEST_CATEGORY = "Functional"
-+TEST_CLASS = "library"
-+TEST_TYPE = "server"
-+
-+DOC = """
-+
-+"""
-+
-+def run_skia_test(machine):
-+ host = hosts.create_host(machine)
-+ job.run_test("android_Skia", host=host)
-+
-+parallel_simple(run_skia_test, machines)
-diff --git a/server/site_tests/android_Synthmark/android_Synthmark.py b/server/site_tests/android_Synthmark/android_Synthmark.py
-new file mode 100644
-index 000000000..b317bd0f3
---- /dev/null
-+++ b/server/site_tests/android_Synthmark/android_Synthmark.py
-@@ -0,0 +1,48 @@
-+# Tests for android Synthmark
-+from __future__ import print_function
-+
-+import bench_config
-+import logging
-+import os
-+import re
-+
-+from autotest_lib.server import test
-+
-+class android_Synthmark(test.test):
-+ version = 1
-+
-+ def run_once(self, host=None):
-+ self.client = host
-+
-+ out_dir = os.path.join(bench_config.android_home,
-+ 'out/target/product/' + bench_config.product)
-+
-+ # Set binary directories
-+ bench_dir = os.path.join(out_dir,
-+ 'symbols/system/bin/synthmark')
-+ bench_dir_DUT = '/data/local/tmp/synthmark'
-+
-+ # Push binary to the device
-+ print('Pushing binaries of Synthmark benchmark onto device!')
-+ host.send_file(bench_dir, bench_dir_DUT, delete_dest=True)
-+
-+ # Make sure the binary is executable
-+ self.client.run('chmod u+x ' + bench_dir_DUT)
-+
-+ print('Running tests on the device...')
-+ # First run creates bench_result
-+ self.client.run('taskset %s /data/local/tmp/synthmark'
-+ ' > /data/local/tmp/bench_result'
-+ % os.getenv('TEST_MODE'))
-+ # Next 4 runs add to bench_result
-+ for i in range(4):
-+ self.client.run('taskset %s /data/local/tmp/synthmark'
-+ ' >> /data/local/tmp/bench_result'
-+ % os.getenv('TEST_MODE'))
-+
-+ # Pull result from the device
-+ out_dir = bench_config.bench_suite_dir
-+ result_dir_DUT = '/data/local/tmp/bench_result'
-+
-+ host.get_file(result_dir_DUT, out_dir, delete_dest=True)
-+ print('Result has been pulled back to file bench_result!')
-diff --git a/server/site_tests/android_Synthmark/bench_config.py b/server/site_tests/android_Synthmark/bench_config.py
-new file mode 100644
-index 000000000..7d7aacacd
---- /dev/null
-+++ b/server/site_tests/android_Synthmark/bench_config.py
-@@ -0,0 +1,19 @@
-+#!/bin/bash/python
-+import os
-+
-+home = os.environ["HOME"]
-+
-+android_home = os.getenv("ANDROID_HOME",
-+ default=os.path.join(home,
-+ 'android_source/master-googleplex/'))
-+bench_suite_dir = os.getenv('BENCH_SUITE_DIR',
-+ default=os.path.join(android_home,
-+ 'benchtoolchain'))
-+
-+synthmark_dir = 'synthmark'
-+
-+real_synthmark_dir = os.path.join(android_home, synthmark_dir)
-+
-+out_dir = os.path.join(android_home, 'out')
-+
-+product = os.getenv("PRODUCT", default="generic")
-diff --git a/server/site_tests/android_Synthmark/control b/server/site_tests/android_Synthmark/control
-new file mode 100644
-index 000000000..144766351
---- /dev/null
-+++ b/server/site_tests/android_Synthmark/control
-@@ -0,0 +1,19 @@
-+#Control
-+
-+NAME = "Synthmark"
-+AUTHOR = "Zhizhou Yang"
-+ATTRIBUTES = "suite:android_toolchain_benchmark"
-+TIME = "MEDIUM"
-+TEST_CATEGORY = "Functional"
-+TEST_CLASS = "application"
-+TEST_TYPE = "server"
-+
-+DOC = """
-+
-+"""
-+
-+def run_synthmark_test(machine):
-+ host = hosts.create_host(machine)
-+ job.run_test("android_Synthmark", host=host)
-+
-+parallel_simple(run_synthmark_test, machines)
-diff --git a/site_utils/set_device.py b/site_utils/set_device.py
-new file mode 100755
-index 000000000..abb8a8dcc
---- /dev/null
-+++ b/site_utils/set_device.py
-@@ -0,0 +1,110 @@
-+#!/usr/bin/python
-+from __future__ import print_function
-+
-+import argparse
-+import common
-+import logging
-+import os
-+import sys
-+
-+# Turn the logging level to INFO before importing other autotest code, to avoid
-+# having failed import logging messages confuse the test_droid user.
-+logging.basicConfig(level=logging.INFO)
-+
-+# Unfortunately, autotest depends on external packages for assorted
-+# functionality regardless of whether or not it is needed in a particular
-+# context. Since we can't depend on people to import these utilities in any
-+# principled way, we dynamically download code before any autotest imports.
-+try:
-+ import chromite.lib.terminal # pylint: disable=unused-import
-+ import django.http # pylint: disable=unused-import
-+except ImportError:
-+ # Ensure the chromite site-package is installed.
-+ import subprocess
-+ build_externals_path = os.path.join(
-+ os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-+ 'utils', 'build_externals.py')
-+ subprocess.check_call([build_externals_path, '--names_to_check',
-+ 'chromiterepo', 'django'])
-+ # Restart the script so python now finds the autotest site-packages.
-+ sys.exit(os.execv(__file__, sys.argv))
-+
-+from autotest_lib.client.common_lib import utils
-+from autotest_lib.server.hosts import adb_host
-+from autotest_lib.site_utils import test_runner_utils
-+from autotest_lib.site_utils import tester_feedback
-+
-+def _parse_arguments_internal(argv):
-+ """
-+ Parse command line arguments
-+
-+ @param argv: argument list to parse
-+
-+ @returns: tuple of parsed arguments and argv suitable for remote runs
-+
-+ @raises SystemExit if arguments are malformed, or required arguments
-+ are not present.
-+ """
-+
-+ parser = argparse.ArgumentParser(description='Set device cpu cores and '
-+ 'frequency.')
-+
-+ parser.add_argument('-s', '--serials', metavar='SERIALS',
-+ help='Comma separate list of device serials under '
-+ 'test.')
-+ parser.add_argument('-r', '--remote', metavar='REMOTE',
-+ default='localhost',
-+ help='hostname[:port] if the ADB device is connected '
-+ 'to a remote machine. Ensure this workstation '
-+ 'is configured for passwordless ssh access as '
-+ 'users "root" or "adb"')
-+ parser.add_argument('-q', '--frequency', type=int, default=960000,
-+ help='Specify the CPU frequency of the device, lower '
-+ 'frequency will slow down the performance but '
-+ 'reduce noise.')
-+
-+ return parser.parse_args(argv)
-+
-+def main(argv):
-+ """
-+ Entry point for set_device script.
-+
-+ @param argv: arguments list
-+ """
-+ arguments = _parse_arguments_internal(argv)
-+
-+ serials = arguments.serials
-+ if serials is None:
-+ result = utils.run(['adb', 'devices'])
-+ devices = adb_host.ADBHost.parse_device_serials(result.stdout)
-+ if len(devices) != 1:
-+ logging.error('Could not detect exactly one device; please select '
-+ 'one with -s: %s', devices)
-+ return 1
-+ serials = devices[0]
-+
-+ autotest_path = os.path.dirname(os.path.dirname(
-+ os.path.realpath(__file__)))
-+ site_utils_path = os.path.join(autotest_path, 'site_utils')
-+ realpath = os.path.realpath(__file__)
-+ site_utils_path = os.path.realpath(site_utils_path)
-+ host_attributes = {'serials': serials,
-+ 'os_type': 'android'}
-+ results_directory = test_runner_utils.create_results_directory(None)
-+
-+ logging.info('Start setting CPU frequency on the device...')
-+
-+ os.environ['FREQUENCY'] = str(arguments.frequency)
-+
-+ set_device = ['SetDevice']
-+ if test_runner_utils.perform_run_from_autotest_root(
-+ autotest_path, argv, set_device, arguments.remote,
-+ host_attributes=host_attributes,
-+ results_directory=results_directory):
-+ logging.error('Error while setting device!')
-+ return 1
-+
-+ return 0
-+
-+if __name__ == '__main__':
-+ sys.exit(main(sys.argv[1:]))
-diff --git a/site_utils/test_bench.py b/site_utils/test_bench.py
-new file mode 100755
-index 000000000..4d0773ad9
---- /dev/null
-+++ b/site_utils/test_bench.py
-@@ -0,0 +1,133 @@
-+#!/usr/bin/python
-+from __future__ import print_function
-+
-+import argparse
-+import common
-+import logging
-+import os
-+import sys
-+
-+# Turn the logging level to INFO before importing other autotest
-+# code, to avoid having failed import logging messages confuse the
-+# test_droid user.
-+logging.basicConfig(level=logging.INFO)
-+
-+# Unfortunately, autotest depends on external packages for assorted
-+# functionality regardless of whether or not it is needed in a particular
-+# context.
-+# Since we can't depend on people to import these utilities in any principled
-+# way, we dynamically download code before any autotest imports.
-+try:
-+ import chromite.lib.terminal # pylint: disable=unused-import
-+ import django.http # pylint: disable=unused-import
-+except ImportError:
-+ # Ensure the chromite site-package is installed.
-+ import subprocess
-+ build_externals_path = os.path.join(
-+ os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
-+ 'utils', 'build_externals.py')
-+ subprocess.check_call([build_externals_path, '--names_to_check',
-+ 'chromiterepo', 'django'])
-+ # Restart the script so python now finds the autotest site-packages.
-+ sys.exit(os.execv(__file__, sys.argv))
-+
-+from autotest_lib.client.common_lib import utils
-+from autotest_lib.server.hosts import adb_host
-+from autotest_lib.site_utils import test_runner_utils
-+from autotest_lib.site_utils import tester_feedback
-+
-+def _parse_arguments_internal(argv):
-+ """
-+ Parse command line arguments
-+
-+ @param argv: argument list to parse
-+
-+  @returns: the parsed arguments
-+
-+ @raises SystemExit if arguments are malformed, or required arguments
-+ are not present.
-+ """
-+
-+ parser = argparse.ArgumentParser(description='Run remote tests.')
-+
-+ parser.add_argument('-b', '--bench', metavar='BENCH', required=True,
-+                      help='Select the benchmark to be run for the '
-+                           'test.')
-+ parser.add_argument('-s', '--serials', metavar='SERIALS',
-+                      help='Comma-separated list of device serials under '
-+ 'test.')
-+ parser.add_argument('-r', '--remote', metavar='REMOTE',
-+ default='localhost',
-+ help='hostname[:port] if the ADB device is connected '
-+ 'to a remote machine. Ensure this workstation '
-+ 'is configured for passwordless ssh access as '
-+ 'users "root" or "adb"')
-+ parser.add_argument('-m', '--mode', default='little',
-+                      help='Two modes can be chosen: little mode runs on a '
-+                           'single core of Cortex-A53, while big mode runs '
-+                           'on a single core of Cortex-A57.')
-+
-+ return parser.parse_args(argv)
-+
-+def main(argv):
-+ """
-+ Entry point for test_bench script.
-+
-+ @param argv: arguments list
-+ """
-+ arguments = _parse_arguments_internal(argv)
-+
-+ serials = arguments.serials
-+ if serials is None:
-+ result = utils.run(['adb', 'devices'])
-+ devices = adb_host.ADBHost.parse_device_serials(result.stdout)
-+ if len(devices) != 1:
-+ logging.error('Could not detect exactly one device; please select '
-+ 'one with -s: %s', devices)
-+ return 1
-+ serials = devices[0]
-+
-+ autotest_path = os.path.dirname(os.path.dirname(
-+ os.path.realpath(__file__)))
-+ site_utils_path = os.path.join(autotest_path, 'site_utils')
-+ realpath = os.path.realpath(__file__)
-+ site_utils_path = os.path.realpath(site_utils_path)
-+ host_attributes = {'serials': serials,
-+ 'os_type': 'android'}
-+ results_directory = test_runner_utils.create_results_directory(None)
-+
-+ bench = arguments.bench
-+
-+  benchlist = ['Panorama', 'Skia', 'Dex2oat', 'Hwui', 'Synthmark', 'Binder']
-+
-+ logging.info('Start testing benchmark on the device...')
-+
-+ if bench not in benchlist:
-+ logging.error('Please select one benchmark from the list below: \n%s',
-+ '\n'.join(benchlist))
-+ return 1
-+
-+ # Use taskset command to run benchmarks with different CPU core settings.
-+ #
-+ # TEST_MODE variable is set to either 7 or 56 for coremask in taskset.
-+ #
-+  # The Nexus 6P has 8 cores and the 5X has 6 cores. CPUs 0-3 on both
-+  # devices are slow Cortex-A53 cores. CPUs 4-5 on the 5X and 4-7 on the 6P
-+  # are fast Cortex-A57 cores.
-+  #
-+  # So we set 7 (0b00000111) for little mode, which runs the benchmark on
-+  # three slow cores, and 56 (0b00111000) for big mode, which runs it on two
-+  # fast cores and one slow core.
-+ os.environ['TEST_MODE'] = '7' if arguments.mode == 'little' else '56'
-+
-+ tests = [bench]
-+
-+ if test_runner_utils.perform_run_from_autotest_root(
-+ autotest_path, argv, tests, arguments.remote,
-+ host_attributes=host_attributes,
-+ results_directory=results_directory):
-+ logging.error('Error while testing on device.')
-+ return 1
-+
-+if __name__ == '__main__':
-+ sys.exit(main(sys.argv[1:]))
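
The TEST_MODE values above are plain CPU affinity bitmasks handed to taskset on the test side; a minimal standalone sketch (not part of the deleted scripts) of how the masks 7 and 56 map onto CPU indices:

def cores_from_mask(mask):
  """Return the CPU indices enabled by a taskset coremask."""
  return [bit for bit in range(mask.bit_length()) if mask & (1 << bit)]

# 'little' mode: three slow Cortex-A53 cores.
assert cores_from_mask(7) == [0, 1, 2]
# 'big' mode: core 3 (slow) plus cores 4 and 5 (fast on the Nexus 5X/6P).
assert cores_from_mask(56) == [3, 4, 5]
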
diff --git a/android_bench_suite/build_bench.py b/android_bench_suite/build_bench.py
deleted file mode 100755
index 44ad7a0..0000000
--- a/android_bench_suite/build_bench.py
+++ /dev/null
@@ -1,228 +0,0 @@
-#!/usr/bin/env python2
-#
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# pylint: disable=cros-logging-import
-
-"""Script to build the benchmark locally with toolchain settings."""
-from __future__ import print_function
-
-import argparse
-import config
-import logging
-import os
-import subprocess
-import sys
-
-# Turn the logging level to INFO before importing other code, to avoid having
-# failed import logging messages confuse the user.
-logging.basicConfig(level=logging.INFO)
-
-
-def _parse_arguments_internal(argv):
- parser = argparse.ArgumentParser(description='Build benchmarks with '
- 'specified toolchain settings')
-
- parser.add_argument(
- '-b', '--bench', required=True, help='Select the benchmark to be built.')
-
- parser.add_argument(
- '-c',
- '--compiler_dir',
- metavar='DIR',
- help='Specify the path to the compiler bin '
- 'directory.')
-
- parser.add_argument(
- '-o', '--build_os', help='Specify the host OS to build benchmark.')
-
- parser.add_argument(
- '-l',
- '--llvm_prebuilts_version',
- help='Specify the version of prebuilt LLVM.')
-
- parser.add_argument(
- '-f',
- '--cflags',
- help='Specify the optimization cflags for '
- 'the toolchain.')
-
- parser.add_argument(
- '--ldflags', help='Specify linker flags for the toolchain.')
-
- return parser.parse_args(argv)
-
-
-# Set flags for compiling benchmarks by changing the local
-# CFLAGS/LDFLAGS in the Android makefile of each benchmark
-def set_flags(bench, cflags, ldflags):
- if not cflags:
- logging.info('No CFLAGS specified, using default settings.')
- cflags = ''
- else:
- logging.info('Cflags setting to "%s"...', cflags)
-
- if not ldflags:
-    logging.info('No LDFLAGS specified, using default settings.')
- ldflags = ''
- else:
- logging.info('Ldflags setting to "%s"...', ldflags)
-
- add_flags = config.bench_flags_dict[bench]
- add_flags(cflags, ldflags)
- logging.info('Flags set successfully!')
-
-
-def set_build_os(build_os):
- # Set $BUILD_OS variable for android makefile
- if build_os:
- os.environ['BUILD_OS'] = build_os
- logging.info('BUILD_OS set to "%s"...', build_os)
- else:
- logging.info('No BUILD_OS specified, using linux as default...')
-
-
-def set_llvm_prebuilts_version(llvm_prebuilts_version):
- # Set $LLVM_PREBUILTS_VERSION for android makefile
- if llvm_prebuilts_version:
- os.environ['LLVM_PREBUILTS_VERSION'] = llvm_prebuilts_version
- logging.info('LLVM_PREBUILTS_VERSION set to "%s"...',
- llvm_prebuilts_version)
- else:
- logging.info('No LLVM_PREBUILTS_VERSION specified, using default one...')
-
-
-def set_compiler(compiler):
- # If compiler_dir has been specified, copy the binaries to
- # a temporary location, set BUILD_OS and LLVM_PREBUILTS_VERSION
- # variables to the location
- if compiler:
-    # Report an error if the path does not exist
- if not os.path.isdir(compiler):
- logging.error('Error while setting compiler: '
- 'Directory %s does not exist!', compiler)
-      raise OSError('Directory %s does not exist.' % compiler)
-
- # Specify temporary directory for compiler
- tmp_dir = os.path.join(config.android_home,
- 'prebuilts/clang/host/linux-x86', 'clang-tmp')
-
- compiler_content = os.path.join(compiler, '.')
-
- # Copy compiler to new directory
- try:
- subprocess.check_call(['cp', '-rf', compiler_content, tmp_dir])
- except subprocess.CalledProcessError:
- logging.error('Error while copying the compiler to '
- 'temporary directory %s!', tmp_dir)
- raise
-
- # Set environment variable
- os.environ['LLVM_PREBUILTS_VERSION'] = 'clang-tmp'
-
- logging.info('Prebuilt Compiler set as %s.', os.path.abspath(compiler))
-
-
-def set_compiler_env(bench, compiler, build_os, llvm_prebuilts_version, cflags,
- ldflags):
- logging.info('Setting compiler options for benchmark...')
-
- # If no specific prebuilt compiler directory, use BUILD_OS and
- # LLVM_PREBUILTS_VERSION to set the compiler version.
- # Otherwise, use the new prebuilt compiler.
- if not compiler:
- set_build_os(build_os)
- set_llvm_prebuilts_version(llvm_prebuilts_version)
- else:
- set_compiler(compiler)
-
- set_flags(bench, cflags, ldflags)
-
- return 0
-
-
-def remove_tmp_dir():
- tmp_dir = os.path.join(config.android_home, 'prebuilts/clang/host/linux-x86',
- 'clang-tmp')
-
- try:
- subprocess.check_call(['rm', '-r', tmp_dir])
- except subprocess.CalledProcessError:
- logging.error('Error while removing the temporary '
- 'compiler directory %s!', tmp_dir)
- raise
-
-
-# Recover the makefile/blueprint from our patch after building
-def restore_makefile(bench):
- pwd = os.path.join(config.android_home, config.bench_dict[bench])
- mk_file = os.path.join(pwd, 'Android.mk')
- if not os.path.exists(mk_file):
- mk_file = os.path.join(pwd, 'Android.bp')
- subprocess.check_call(['mv', os.path.join(pwd, 'tmp_makefile'), mk_file])
-
-
-# Run script to build benchmark
-def build_bench(bench, source_dir):
- logging.info('Start building benchmark...')
-
- raw_cmd = ('cd {android_home} '
- '&& source build/envsetup.sh '
- '&& lunch {product_combo} '
- '&& mmma {source_dir} -j48'.format(
- android_home=config.android_home,
- product_combo=config.product_combo,
- source_dir=source_dir))
-
- log_file = os.path.join(config.bench_suite_dir, 'build_log')
- with open(log_file, 'a') as logfile:
- log_head = 'Log for building benchmark: %s\n' % (bench)
- logfile.write(log_head)
- try:
- subprocess.check_call(
- ['bash', '-c', raw_cmd], stdout=logfile, stderr=logfile)
- except subprocess.CalledProcessError:
- logging.error('Error while running %s, please check '
- '%s for more info.', raw_cmd, log_file)
- restore_makefile(bench)
- raise
-
- logging.info('Logs for building benchmark %s are written to %s.', bench,
- log_file)
- logging.info('Benchmark built successfully!')
-
-
-def main(argv):
- arguments = _parse_arguments_internal(argv)
-
- bench = arguments.bench
- compiler = arguments.compiler_dir
- build_os = arguments.build_os
- llvm_version = arguments.llvm_prebuilts_version
- cflags = arguments.cflags
- ldflags = arguments.ldflags
-
- try:
- source_dir = config.bench_dict[bench]
- except KeyError:
- logging.error('Please select one benchmark from the list below:\n\t' +
- '\n\t'.join(config.bench_list))
- raise
-
- set_compiler_env(bench, compiler, build_os, llvm_version, cflags, ldflags)
-
- build_bench(bench, source_dir)
-
-  # If flags have been set, remember to restore the makefile/blueprint to
-  # the original ones.
- restore_makefile(bench)
-
- # If a tmp directory is used for compiler path, remove it after building.
- if compiler:
- remove_tmp_dir()
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
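
build_bench() chains envsetup.sh, lunch, and mmma in a single 'bash -c' invocation so the environment set up by the first two steps is still live when mmma runs, and it funnels all output into one log file. A rough sketch of that pattern (the path and lunch combo below are placeholders, not values from the suite's config):

import subprocess

def run_logged(cmd, log_path):
  """Run a shell command string, appending its stdout and stderr to log_path."""
  with open(log_path, 'a') as log:
    subprocess.check_call(['bash', '-c', cmd], stdout=log, stderr=log)

# run_logged('cd /path/to/aosp && source build/envsetup.sh && lunch aosp_arm-eng '
#            '&& mmma frameworks/native/libs/binder -j48', '/tmp/build_log')
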
diff --git a/android_bench_suite/config.py b/android_bench_suite/config.py
deleted file mode 100644
index 4cfd261..0000000
--- a/android_bench_suite/config.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Configuration file for the benchmark suite."""
-from __future__ import print_function
-
-import ConfigParser
-import os
-
-from parse_result import parse_Panorama
-from parse_result import parse_Dex2oat
-from parse_result import parse_Hwui
-from parse_result import parse_Skia
-from parse_result import parse_Synthmark
-from parse_result import parse_Binder
-
-from set_flags import add_flags_Panorama
-from set_flags import add_flags_Dex2oat
-from set_flags import add_flags_Hwui
-from set_flags import add_flags_Skia
-from set_flags import add_flags_Synthmark
-from set_flags import add_flags_Binder
-
-home = os.environ['HOME']
-
-# Load user configurations for default environments
-env_config = ConfigParser.ConfigParser(allow_no_value=True)
-env_config.read('env_setting')
-
-def get_suite_env(name, path=False):
- variable = env_config.get('Suite_Environment', name)
- if variable:
- if path and not os.path.isdir(variable):
- raise ValueError('The path of %s does not exist.' % name)
- return variable
- else:
- raise ValueError('Please specify %s in env_setting' % name)
-
-# Android source code type: internal or aosp
-android_type = get_suite_env('android_type')
-
-# Android home directory specified as android_home,
-android_home = get_suite_env('android_home', True)
-
-# The benchmark results will be saved in bench_suite_dir.
-# Please create a directory to store the results; the default directory is
-# android_home/benchtoolchain
-bench_suite_dir = get_suite_env('bench_suite_dir', True)
-
-# Crosperf directory is used to generate crosperf report.
-toolchain_utils = get_suite_env('toolchain_utils', True)
-
-# Please change both product and architecture at the same time
-# Product can be chosen from the lunch list of android building.
-product_combo = get_suite_env('product_combo')
-
-# Arch can be found from out/target/product
-product = get_suite_env('product')
-
-# The benchmark list is defined in the following variables; you can extend it
-# by adding new benchmarks.
-bench_dict = {
- 'Panorama': 'packages/apps/LegacyCamera/jni/',
- 'Dex2oat': 'art/compiler/',
- 'Hwui': 'frameworks/base/libs/hwui/',
- 'Skia': 'external/skia/',
- 'Synthmark': 'synthmark/',
- 'Binder': 'frameworks/native/libs/binder/',
-}
-
-bench_parser_dict = {
- 'Panorama': parse_Panorama,
- 'Dex2oat': parse_Dex2oat,
- 'Hwui': parse_Hwui,
- 'Skia': parse_Skia,
- 'Synthmark': parse_Synthmark,
- 'Binder': parse_Binder,
-}
-
-bench_flags_dict = {
- 'Panorama': add_flags_Panorama,
- 'Dex2oat': add_flags_Dex2oat,
- 'Hwui': add_flags_Hwui,
- 'Skia': add_flags_Skia,
- 'Synthmark': add_flags_Synthmark,
- 'Binder': add_flags_Binder,
-}
-
-bench_list = bench_dict.keys()
-
-# Directories used in the benchmark suite
-autotest_dir = 'external/autotest/'
-out_dir = os.path.join(android_home, 'out')
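
The env_setting lookups in config.py are ordinary ConfigParser reads plus an optional directory check. A self-contained sketch of the same pattern, written against Python 3's configparser purely for illustration (the deleted module used the Python 2 ConfigParser):

import configparser
import os

def get_suite_env(cfg, name, path=False):
  """Read a Suite_Environment value, optionally requiring it to be a directory."""
  value = cfg.get('Suite_Environment', name, fallback=None)
  if not value:
    raise ValueError('Please specify %s in env_setting' % name)
  if path and not os.path.isdir(value):
    raise ValueError('The path of %s does not exist.' % name)
  return value

cfg = configparser.ConfigParser(allow_no_value=True)
cfg.read('env_setting')  # assumed to sit next to the scripts, as in the suite
# android_home = get_suite_env(cfg, 'android_home', path=True)
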
diff --git a/android_bench_suite/dex2oat_input/test1.apk b/android_bench_suite/dex2oat_input/test1.apk
deleted file mode 100644
index 16cc638..0000000
--- a/android_bench_suite/dex2oat_input/test1.apk
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/dex2oat_input/test2.apk b/android_bench_suite/dex2oat_input/test2.apk
deleted file mode 100644
index 4dc0aa0..0000000
--- a/android_bench_suite/dex2oat_input/test2.apk
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/discard_patches.py b/android_bench_suite/discard_patches.py
deleted file mode 100755
index 7a4b0b5..0000000
--- a/android_bench_suite/discard_patches.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/usr/bin/env python2
-#
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Script to discard all the patches added to Android for this suite"""
-
-from __future__ import print_function
-
-import config
-import os
-import subprocess
-
-
-def discard_git(path):
- try:
- subprocess.check_call(['git', '-C', path, 'reset'])
- subprocess.check_call(['git', '-C', path, 'clean', '-fdx'])
- subprocess.check_call(['git', '-C', path, 'stash'])
- print('Patch in %s removed successfully!' % path)
- except subprocess.CalledProcessError:
- print('Error while removing patch in %s' % path)
-
-
-def dispatch_skia():
- skia_dir = os.path.join(config.android_home, config.bench_dict['Skia'])
- discard_git(skia_dir)
-
-
-def dispatch_autotest():
- autotest_dir = os.path.join(config.android_home, config.autotest_dir)
- discard_git(autotest_dir)
-
-
-def dispatch_panorama():
- panorama_dir = os.path.join(config.android_home,
- config.bench_dict['Panorama'])
- discard_git(panorama_dir)
-
-
-def dispatch_synthmark():
- synthmark_dir = 'synthmark'
- try:
- subprocess.check_call(
- ['rm', '-rf',
- os.path.join(config.android_home, synthmark_dir)])
- print('Synthmark patch removed successfully!')
- except subprocess.CalledProcessError:
-    print('Error occurred; Synthmark was not removed.')
-
-
-def main():
- dispatch_skia()
- dispatch_autotest()
- dispatch_panorama()
- dispatch_synthmark()
-
-
-if __name__ == '__main__':
- main()
diff --git a/android_bench_suite/env_setting b/android_bench_suite/env_setting
deleted file mode 100644
index 397888b..0000000
--- a/android_bench_suite/env_setting
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-[Suite_Environment]
-
-# Please indicate the android source type: internal or aosp
-
-android_type = aosp
-
-# This is the root of your Android sources.
-
-android_home = /work/android/aosp
-
-# This is the directory where you want to put the logs and results to.
-# Please create the directory before specifying it here.
-
-bench_suite_dir = /work/android/bench_suite_dir
-
-# Crosperf is a toolchain utility that helps generate an HTML report of the test.
-# Please specify the path to it here.
-
-toolchain_utils = /chromium_source/src/third_party/toolchain-utils/crosperf
-
-# product_combo and product describe the type of image you want to build.
-# The product_combo list can be found with the 'lunch' command.
-# The product is usually the middle part of the combo name; it can also be
-# found in $ANDROID_HOME/out/target/product/
-
-product_combo = aosp_bullhead-userdebug
-product = bullhead
diff --git a/android_bench_suite/fix_json.py b/android_bench_suite/fix_json.py
deleted file mode 100755
index cf94dd6..0000000
--- a/android_bench_suite/fix_json.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python2
-#
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# pylint: disable=cros-logging-import
-
-"""Script to re-format json result to one with branch_name and build_id"""
-from __future__ import print_function
-
-import argparse
-import config
-import json
-import logging
-import os
-import subprocess
-import sys
-
-# Turn the logging level to INFO before importing other autotest
-# code, to avoid having failed import logging messages confuse the
-# test_droid user.
-logging.basicConfig(level=logging.INFO)
-
-
-def _parse_arguments_internal(argv):
-  parser = argparse.ArgumentParser(description='Convert result to JSON '
- 'format')
- parser.add_argument(
- '-b', '--bench', help='Generate JSON format file for which benchmark.')
- return parser.parse_args(argv)
-
-def fix_json(bench):
- # Set environment variable for crosperf
- os.environ['PYTHONPATH'] = os.path.dirname(config.toolchain_utils)
-
- logging.info('Generating Crosperf Report...')
- json_path = os.path.join(config.bench_suite_dir, bench + '_refined')
- crosperf_cmd = [
- os.path.join(config.toolchain_utils, 'generate_report.py'), '--json',
- '-i=' + os.path.join(config.bench_suite_dir, bench + '.json'),
- '-o=' + json_path, '-f'
- ]
-
- # Run crosperf generate_report.py
- logging.info('Command: %s', crosperf_cmd)
- subprocess.call(crosperf_cmd)
-
- json_path += '.json'
- with open(json_path) as fout:
- objs = json.load(fout)
- for obj in objs:
- obj['branch_name'] = 'aosp/master'
- obj['build_id'] = 0
- with open(json_path, 'w') as fout:
- json.dump(objs, fout)
-
- logging.info('JSON file fixed successfully!')
-
-def main(argv):
- arguments = _parse_arguments_internal(argv)
-
- bench = arguments.bench
-
- fix_json(bench)
-
-if __name__ == '__main__':
- main(sys.argv[1:])
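
After crosperf's generate_report.py has produced the JSON list, the only "fix" fix_json() applies is stamping two extra fields onto every record. A minimal standalone sketch of that step (the helper name is made up for illustration):

import json

def annotate_report(path, branch='aosp/master', build_id=0):
  """Load a crosperf JSON report, tag each record, and write it back in place."""
  with open(path) as fin:
    records = json.load(fin)
  for record in records:
    record['branch_name'] = branch
    record['build_id'] = build_id
  with open(path, 'w') as fout:
    json.dump(records, fout)
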
diff --git a/android_bench_suite/fix_skia_results.py b/android_bench_suite/fix_skia_results.py
deleted file mode 100755
index 84dee5a..0000000
--- a/android_bench_suite/fix_skia_results.py
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env python2
-#
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# pylint: disable=cros-logging-import
-
-"""Transforms skia benchmark results to ones that crosperf can understand."""
-
-from __future__ import print_function
-
-import itertools
-import logging
-import json
-import sys
-
-# Turn the logging level to INFO before importing other autotest
-# code, to avoid having failed import logging messages confuse the
-# test_droid user.
-logging.basicConfig(level=logging.INFO)
-
-# All of the results we care about, by name.
-# Each of these *must* end in _ns, _us, _ms, or _s, since all the metrics we
-# collect (so far) are related to time, and we alter the results based on the
-# suffix of these strings (so we don't have 0.000421ms per sample, for example)
-_RESULT_RENAMES = {
- 'memset32_100000_640_480_nonrendering': 'memset_time_ms',
- 'path_equality_50%_640_480_nonrendering': 'path_equality_ns',
- 'sort_qsort_backward_640_480_nonrendering': 'qsort_us'
-}
-
-
-def _GetFamiliarName(name):
-  r = _RESULT_RENAMES.get(name)
- return r if r else name
-
-
-def _IsResultInteresting(name):
- return name in _RESULT_RENAMES
-
-
-def _GetTimeMultiplier(label_name):
- """Given a time (in milliseconds), normalize it to what label_name expects.
-
- "What label_name expects" meaning "we pattern match against the last few
- non-space chars in label_name."
-
- This expects the time unit to be separated from anything else by '_'.
- """
- ms_mul = 1000 * 1000.
- endings = [('_ns', 1), ('_us', 1000), ('_ms', ms_mul), ('_s', ms_mul * 1000)]
- for end, mul in endings:
- if label_name.endswith(end):
- return ms_mul / mul
- raise ValueError('Unknown ending in "%s"; expecting one of %s' %
- (label_name, [end for end, _ in endings]))
-
-
-def _GetTimeDenom(ms):
- """Express times in a common time unit.
-
- Given a list of times (in milliseconds), find a time unit in which
- they can all be expressed.
-
- Returns the unit name, and `ms` normalized to that time unit.
-
- >>> _GetTimeDenom([1, 2, 3])
- ('ms', [1.0, 2.0, 3.0])
- >>> _GetTimeDenom([.1, .2, .3])
- ('us', [100.0, 200.0, 300.0])
- """
-
- ms_mul = 1000 * 1000
- units = [('us', 1000), ('ms', ms_mul), ('s', ms_mul * 1000)]
- for name, mul in reversed(units):
- normalized = [float(t) * ms_mul / mul for t in ms]
- average = sum(normalized) / len(normalized)
- if all(n > 0.1 for n in normalized) and average >= 1:
- return name, normalized
-
- normalized = [float(t) * ms_mul for t in ms]
- return 'ns', normalized
-
-
-def _TransformBenchmarks(raw_benchmarks):
- # We get {"results": {"bench_name": Results}}
- # where
- # Results = {"config_name": {"samples": [float], etc.}}
- #
- # We want {"data": {"skia": [[BenchmarkData]]},
- # "platforms": ["platform1, ..."]}
- # where
- # BenchmarkData = {"bench_name": bench_samples[N], ..., "retval": 0}
- #
- # Note that retval is awkward -- crosperf's JSON reporter reports the result
- # as a failure if it's not there. Everything else treats it like a
- # statistic...
- benchmarks = raw_benchmarks['results']
- results = []
- for bench_name, bench_result in benchmarks.items():
- try:
- for cfg_name, keyvals in bench_result.items():
- # Some benchmarks won't have timing data (either it won't exist at all,
- # or it'll be empty); skip them.
- samples = keyvals.get('samples')
- if not samples:
- continue
-
- bench_name = '%s_%s' % (bench_name, cfg_name)
- if not _IsResultInteresting(bench_name):
- continue
-
- friendly_name = _GetFamiliarName(bench_name)
- if len(results) < len(samples):
- results.extend(
- {'retval': 0} for _ in range(len(samples) - len(results)))
-
- time_mul = _GetTimeMultiplier(friendly_name)
-          for sample, app in itertools.izip(samples, results):
- assert friendly_name not in app
- app[friendly_name] = sample * time_mul
- except (KeyError, ValueError) as e:
- logging.error('While converting "%s" (key: %s): %s', bench_result,
- bench_name, e)
- raise
-
- # Realistically, [results] should be multiple results, where each entry in the
- # list is the result for a different label. Because we only deal with one
- # label at the moment, we need to wrap it in its own list.
- return results
-
-
-if __name__ == '__main__':
-
- def _GetUserFile(argv):
- if not argv or argv[0] == '-':
- return sys.stdin
- return open(argv[0])
-
- def _Main():
- with _GetUserFile(sys.argv[1:]) as in_file:
- obj = json.load(in_file)
- output = _TransformBenchmarks(obj)
- json.dump(output, sys.stdout)
-
- _Main()
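
The unit handling in _GetTimeMultiplier boils down to: samples arrive in milliseconds, and the metric-name suffix picks the reporting unit. A small restatement of that rule as standalone code (not the deleted function itself):

def time_multiplier(label):
  """Return the factor converting a value in ms to the unit named by label's suffix."""
  ms = 1000 * 1000.0
  for suffix, mul in (('_ns', 1), ('_us', 1000), ('_ms', ms), ('_s', ms * 1000)):
    if label.endswith(suffix):
      return ms / mul
  raise ValueError('Unknown ending in %r' % label)

assert time_multiplier('qsort_us') == 1000.0      # 1 ms == 1000 us
assert time_multiplier('memset_time_ms') == 1.0   # already in ms
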
diff --git a/android_bench_suite/gen_json.py b/android_bench_suite/gen_json.py
deleted file mode 100755
index e125293..0000000
--- a/android_bench_suite/gen_json.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python2
-#
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# pylint: disable=cros-logging-import
-
-"""Script to help generate json format report from raw data."""
-from __future__ import print_function
-
-import argparse
-import config
-import json
-import logging
-import sys
-
-# Turn the logging level to INFO before importing other autotest
-# code, to avoid having failed import logging messages confuse the
-# test_droid user.
-logging.basicConfig(level=logging.INFO)
-
-
-def _parse_arguments_internal(argv):
-  parser = argparse.ArgumentParser(description='Convert result to JSON '
- 'format')
-
- parser.add_argument(
- '-b', '--bench', help='Generate JSON format file for which benchmark.')
-
- parser.add_argument(
- '-i', '--input', help='Specify the input result file name.')
-
- parser.add_argument(
- '-o', '--output', help='Specify the output JSON format result file')
-
- parser.add_argument(
- '-p',
- '--platform',
-      help='Indicate the platform (experiment or device) name '
- 'to be shown in JSON')
-
- parser.add_argument(
- '--iterations',
- type=int,
- help='How many iterations does the result include.')
- return parser.parse_args(argv)
-
-# Collect data and generate a JSON dict from the benchmark result
-def collect_data(infile, bench, it):
- result_dict = {}
- with open(infile + str(it)) as fin:
- if bench not in config.bench_parser_dict:
- logging.error('Please input the correct benchmark name.')
- raise ValueError('Wrong benchmark name: %s' % bench)
- parse = config.bench_parser_dict[bench]
- result_dict = parse(bench, fin)
- return result_dict
-
-# If there is no original output file, create a new one and init it.
-def create_outfile(outfile, bench):
- with open(outfile, 'w') as fout:
- obj_null = {'data': {bench.lower(): []}, 'platforms': []}
- json.dump(obj_null, fout)
-
-# Open the existing output file, creating and initializing it if needed.
-def get_outfile(outfile, bench):
- try:
- return open(outfile)
- except IOError:
- create_outfile(outfile, bench)
- return open(outfile)
-
-def main(argv):
- arguments = _parse_arguments_internal(argv)
-
- bench = arguments.bench
- infile = arguments.input
- outfile = arguments.output
- platform = arguments.platform
- iteration = arguments.iterations
-
- result = []
- for i in range(iteration):
- result += collect_data(infile, bench, i)
-
- with get_outfile(outfile, bench) as fout:
- obj = json.load(fout)
- obj['platforms'].append(platform)
- obj['data'][bench.lower()].append(result)
- with open(outfile, 'w') as fout:
- json.dump(obj, fout)
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
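
gen_json.py accumulates each run into a crosperf-shaped file of the form {'data': {bench: [...]}, 'platforms': [...]}. A compact sketch of that create-or-append pattern (the helper name is hypothetical):

import json
import os

def append_run(outfile, bench, platform, run_results):
  """Add one platform's results to the crosperf-style JSON file, creating it if missing."""
  if os.path.exists(outfile):
    with open(outfile) as fin:
      obj = json.load(fin)
  else:
    obj = {'data': {bench.lower(): []}, 'platforms': []}
  obj['platforms'].append(platform)
  obj['data'][bench.lower()].append(run_results)
  with open(outfile, 'w') as fout:
    json.dump(obj, fout)
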
diff --git a/android_bench_suite/panorama.diff b/android_bench_suite/panorama.diff
deleted file mode 100644
index fcd214e..0000000
--- a/android_bench_suite/panorama.diff
+++ /dev/null
@@ -1,763 +0,0 @@
-diff --git a/jni/Android.mk b/jni/Android.mk
-index 8b816270..d48e0d8e 100755
---- a/jni/Android.mk
-+++ b/jni/Android.mk
-@@ -1,50 +1,53 @@
-+local_target_dir := $(TARGET_OUT_DATA)/local/tmp
-+
- LOCAL_PATH:= $(call my-dir)
-
- include $(CLEAR_VARS)
-
- LOCAL_C_INCLUDES := \
-- $(LOCAL_PATH)/feature_stab/db_vlvm \
-- $(LOCAL_PATH)/feature_stab/src \
-- $(LOCAL_PATH)/feature_stab/src/dbreg \
-- $(LOCAL_PATH)/feature_mos/src \
-- $(LOCAL_PATH)/feature_mos/src/mosaic
--
--LOCAL_CFLAGS := -O3 -DNDEBUG
--LOCAL_CPPFLAGS := -std=gnu++98
--
--LOCAL_SRC_FILES := \
-- feature_mos_jni.cpp \
-- mosaic_renderer_jni.cpp \
-- feature_mos/src/mosaic/trsMatrix.cpp \
-- feature_mos/src/mosaic/AlignFeatures.cpp \
-- feature_mos/src/mosaic/Blend.cpp \
-- feature_mos/src/mosaic/Delaunay.cpp \
-- feature_mos/src/mosaic/ImageUtils.cpp \
-- feature_mos/src/mosaic/Mosaic.cpp \
-- feature_mos/src/mosaic/Pyramid.cpp \
-- feature_mos/src/mosaic_renderer/Renderer.cpp \
-- feature_mos/src/mosaic_renderer/WarpRenderer.cpp \
-- feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp \
-- feature_mos/src/mosaic_renderer/YVURenderer.cpp \
-- feature_mos/src/mosaic_renderer/FrameBuffer.cpp \
-- feature_stab/db_vlvm/db_feature_detection.cpp \
-- feature_stab/db_vlvm/db_feature_matching.cpp \
-- feature_stab/db_vlvm/db_framestitching.cpp \
-- feature_stab/db_vlvm/db_image_homography.cpp \
-- feature_stab/db_vlvm/db_rob_image_homography.cpp \
-- feature_stab/db_vlvm/db_utilities.cpp \
-- feature_stab/db_vlvm/db_utilities_camera.cpp \
-- feature_stab/db_vlvm/db_utilities_indexing.cpp \
-- feature_stab/db_vlvm/db_utilities_linalg.cpp \
-- feature_stab/db_vlvm/db_utilities_poly.cpp \
-- feature_stab/src/dbreg/dbreg.cpp \
-- feature_stab/src/dbreg/dbstabsmooth.cpp \
-- feature_stab/src/dbreg/vp_motionmodel.c
-+ $(LOCAL_PATH)/feature_mos/src \
-+ $(LOCAL_PATH)/feature_stab/src \
-+ $(LOCAL_PATH)/feature_stab/db_vlvm
-
--LOCAL_SHARED_LIBRARIES := liblog libnativehelper libGLESv2
--#LOCAL_LDLIBS := -L$(SYSROOT)/usr/lib -ldl -llog -lGLESv2 -L$(TARGET_OUT)
-+LOCAL_SRC_FILES := benchmark.cpp \
-+ feature_mos/src/mosaic/ImageUtils.cpp \
-+ feature_mos/src/mosaic/Mosaic.cpp \
-+ feature_mos/src/mosaic/AlignFeatures.cpp \
-+ feature_mos/src/mosaic/Blend.cpp \
-+ feature_mos/src/mosaic/Pyramid.cpp \
-+ feature_mos/src/mosaic/trsMatrix.cpp \
-+ feature_mos/src/mosaic/Delaunay.cpp \
-+ feature_mos/src/mosaic_renderer/Renderer.cpp \
-+ feature_mos/src/mosaic_renderer/WarpRenderer.cpp \
-+ feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp \
-+ feature_mos/src/mosaic_renderer/YVURenderer.cpp \
-+ feature_mos/src/mosaic_renderer/FrameBuffer.cpp \
-+ feature_stab/db_vlvm/db_rob_image_homography.cpp \
-+ feature_stab/db_vlvm/db_feature_detection.cpp \
-+ feature_stab/db_vlvm/db_image_homography.cpp \
-+ feature_stab/db_vlvm/db_framestitching.cpp \
-+ feature_stab/db_vlvm/db_feature_matching.cpp \
-+ feature_stab/db_vlvm/db_utilities.cpp \
-+ feature_stab/db_vlvm/db_utilities_camera.cpp \
-+ feature_stab/db_vlvm/db_utilities_indexing.cpp \
-+ feature_stab/db_vlvm/db_utilities_linalg.cpp \
-+ feature_stab/db_vlvm/db_utilities_poly.cpp \
-+ feature_stab/src/dbreg/dbstabsmooth.cpp \
-+ feature_stab/src/dbreg/dbreg.cpp \
-+ feature_stab/src/dbreg/vp_motionmodel.c
-
--LOCAL_MODULE_TAGS := optional
-+LOCAL_CFLAGS := -O3 -DNDEBUG -Wno-unused-parameter -Wno-maybe-uninitialized
-+LOCAL_CFLAGS += $(CFLAGS_FOR_BENCH_SUITE)
-+LOCAL_LDFLAGS := $(LDFLAGS_FOR_BENCH_SUITE)
-+LOCAL_CPPFLAGS := -std=c++98
-+LOCAL_MODULE_TAGS := tests
-+LOCAL_MODULE := panorama_bench
-+LOCAL_MODULE_STEM_32 := panorama_bench
-+LOCAL_MODULE_STEM_64 := panorama_bench64
-+LOCAL_MULTILIB := both
-+LOCAL_MODULE_PATH := $(local_target_dir)
-+LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
-+LOCAL_FORCE_STATIC_EXECUTABLE := true
-+LOCAL_STATIC_LIBRARIES := libc libm
-
--LOCAL_MODULE := libjni_legacymosaic
--include $(BUILD_SHARED_LIBRARY)
-+include $(BUILD_EXECUTABLE)
-diff --git a/jni/benchmark.cpp b/jni/benchmark.cpp
-new file mode 100755
-index 00000000..2a6440f4
---- /dev/null
-+++ b/jni/benchmark.cpp
-@@ -0,0 +1,131 @@
-+/*
-+ * Copyright (C) 2012 The Android Open Source Project
-+ *
-+ * Licensed under the Apache License, Version 2.0 (the "License");
-+ * you may not use this file except in compliance with the License.
-+ * You may obtain a copy of the License at
-+ *
-+ * http://www.apache.org/licenses/LICENSE-2.0
-+ *
-+ * Unless required by applicable law or agreed to in writing, software
-+ * distributed under the License is distributed on an "AS IS" BASIS,
-+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-+ * See the License for the specific language governing permissions and
-+ * limitations under the License.
-+ */
-+
-+#include <time.h>
-+#include <sys/types.h>
-+#include <sys/stat.h>
-+#include <unistd.h>
-+
-+#include "mosaic/Mosaic.h"
-+#include "mosaic/ImageUtils.h"
-+
-+#define MAX_FRAMES 200
-+#define KERNEL_ITERATIONS 10
-+
-+const int blendingType = Blend::BLEND_TYPE_HORZ;
-+const int stripType = Blend::STRIP_TYPE_WIDE;
-+
-+ImageType yvuFrames[MAX_FRAMES];
-+
-+int loadImages(const char* basename, int &width, int &height)
-+{
-+ char filename[512];
-+ struct stat filestat;
-+ int i;
-+
-+ for (i = 0; i < MAX_FRAMES; i++) {
-+ sprintf(filename, "%s_%03d.ppm", basename, i + 1);
-+ if (stat(filename, &filestat) != 0) break;
-+ ImageType rgbFrame = ImageUtils::readBinaryPPM(filename, width, height);
-+ yvuFrames[i] = ImageUtils::allocateImage(width, height,
-+ ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
-+ ImageUtils::rgb2yvu(yvuFrames[i], rgbFrame, width, height);
-+ ImageUtils::freeImage(rgbFrame);
-+ }
-+ return i;
-+}
-+
-+int main(int argc, char **argv)
-+{
-+ struct timespec t1, t2, t3;
-+
-+ int width, height;
-+ float totalElapsedTime = 0;
-+
-+ const char *basename;
-+ const char *filename;
-+
-+ if (argc != 3) {
-+ printf("Usage: %s input_dir output_filename\n", argv[0]);
-+ return 0;
-+ } else {
-+ basename = argv[1];
-+ filename = argv[2];
-+ }
-+
-+ // Load the images outside the computational kernel
-+ int totalFrames = loadImages(basename, width, height);
-+
-+ if (totalFrames == 0) {
-+ printf("Image files not found. Make sure %s exists.\n",
-+ basename);
-+ return 1;
-+ }
-+
-+ printf("%d frames loaded\n", totalFrames);
-+
-+
-+ // Interesting stuff is here
-+ for (int iteration = 0; iteration < KERNEL_ITERATIONS; iteration++) {
-+ Mosaic mosaic;
-+
-+ mosaic.initialize(blendingType, stripType, width, height, -1, false, 0);
-+
-+ clock_gettime(CLOCK_MONOTONIC, &t1);
-+ for (int i = 0; i < totalFrames; i++) {
-+ mosaic.addFrame(yvuFrames[i]);
-+ }
-+ clock_gettime(CLOCK_MONOTONIC, &t2);
-+
-+ float progress = 0.0;
-+ bool cancelComputation = false;
-+
-+ mosaic.createMosaic(progress, cancelComputation);
-+
-+ int mosaicWidth, mosaicHeight;
-+ ImageType resultYVU = mosaic.getMosaic(mosaicWidth, mosaicHeight);
-+
-+ ImageType imageRGB = ImageUtils::allocateImage(
-+ mosaicWidth, mosaicHeight, ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
-+
-+ clock_gettime(CLOCK_MONOTONIC, &t3);
-+
-+ float elapsedTime =
-+ (t3.tv_sec - t1.tv_sec) + (t3.tv_nsec - t1.tv_nsec)/1e9;
-+ float addImageTime =
-+ (t2.tv_sec - t1.tv_sec) + (t2.tv_nsec - t1.tv_nsec)/1e9;
-+ float stitchImageTime =
-+ (t3.tv_sec - t2.tv_sec) + (t3.tv_nsec - t2.tv_nsec)/1e9;
-+
-+ totalElapsedTime += elapsedTime;
-+
-+ printf("Iteration %d: %dx%d moasic created: "
-+ "%.2f seconds (%.2f + %.2f)\n",
-+ iteration, mosaicWidth, mosaicHeight,
-+ elapsedTime, addImageTime, stitchImageTime);
-+
-+ // Write the output only once for correctness check
-+ if (iteration == 0) {
-+ ImageUtils::yvu2rgb(imageRGB, resultYVU, mosaicWidth,
-+ mosaicHeight);
-+ ImageUtils::writeBinaryPPM(imageRGB, filename, mosaicWidth,
-+ mosaicHeight);
-+ }
-+ }
-+ printf("Total elapsed time: %.2f seconds\n", totalElapsedTime);
-+
-+ return 0;
-+}
-diff --git a/jni/feature_mos/src/mosaic/AlignFeatures.cpp b/jni/feature_mos/src/mosaic/AlignFeatures.cpp
-index aeabf8f9..703a5ea5 100644
---- a/jni/feature_mos/src/mosaic/AlignFeatures.cpp
-+++ b/jni/feature_mos/src/mosaic/AlignFeatures.cpp
-@@ -30,6 +30,8 @@
-
- #define LOG_TAG "AlignFeatures"
-
-+const double Align::DEFAULT_MAX_DISPARITY = 0.1;
-+
- Align::Align()
- {
- width = height = 0;
-diff --git a/jni/feature_mos/src/mosaic/AlignFeatures.h b/jni/feature_mos/src/mosaic/AlignFeatures.h
-index 19f39051..9999f575 100644
---- a/jni/feature_mos/src/mosaic/AlignFeatures.h
-+++ b/jni/feature_mos/src/mosaic/AlignFeatures.h
-@@ -44,7 +44,7 @@ public:
- ///// Settings for feature-based alignment
- // Number of features to use from corner detection
- static const int DEFAULT_NR_CORNERS=750;
-- static const double DEFAULT_MAX_DISPARITY=0.1;//0.4;
-+ static const double DEFAULT_MAX_DISPARITY;//0.4;
- // Type of homography to model
- static const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_R_T;
- // static const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_PROJECTIVE;
-diff --git a/jni/feature_mos/src/mosaic/Blend.cpp b/jni/feature_mos/src/mosaic/Blend.cpp
-index e37755de..b6a843a2 100644
---- a/jni/feature_mos/src/mosaic/Blend.cpp
-+++ b/jni/feature_mos/src/mosaic/Blend.cpp
-@@ -26,8 +26,8 @@
- #include "Geometry.h"
- #include "trsMatrix.h"
-
--#include "Log.h"
--#define LOG_TAG "BLEND"
-+const float Blend::LIMIT_SIZE_MULTIPLIER = 50.f * 2.0f;
-+const float Blend::LIMIT_HEIGHT_MULTIPLIER = 2.5f;
-
- Blend::Blend()
- {
-@@ -67,7 +67,6 @@ int Blend::initialize(int blendingType, int stripType, int frame_width, int fram
-
- if (!m_pFrameYPyr || !m_pFrameUPyr || !m_pFrameVPyr)
- {
-- LOGE("Error: Could not allocate pyramids for blending");
- return BLEND_RET_ERROR_MEMORY;
- }
-
-@@ -122,7 +121,6 @@ int Blend::runBlend(MosaicFrame **oframes, MosaicFrame **rframes,
-
- if (numCenters == 0)
- {
-- LOGE("Error: No frames to blend");
- return BLEND_RET_ERROR;
- }
-
-@@ -228,9 +226,6 @@ int Blend::runBlend(MosaicFrame **oframes, MosaicFrame **rframes,
-
- if (xRightMost <= xLeftMost || yBottomMost <= yTopMost)
- {
-- LOGE("RunBlend: aborting -consistency check failed,"
-- "(xLeftMost, xRightMost, yTopMost, yBottomMost): (%d, %d, %d, %d)",
-- xLeftMost, xRightMost, yTopMost, yBottomMost);
- return BLEND_RET_ERROR;
- }
-
-@@ -241,17 +236,12 @@ int Blend::runBlend(MosaicFrame **oframes, MosaicFrame **rframes,
- ret = MosaicSizeCheck(LIMIT_SIZE_MULTIPLIER, LIMIT_HEIGHT_MULTIPLIER);
- if (ret != BLEND_RET_OK)
- {
-- LOGE("RunBlend: aborting - mosaic size check failed, "
-- "(frame_width, frame_height) vs (mosaic_width, mosaic_height): "
-- "(%d, %d) vs (%d, %d)", width, height, Mwidth, Mheight);
- return ret;
- }
-
-- LOGI("Allocate mosaic image for blending - size: %d x %d", Mwidth, Mheight);
- YUVinfo *imgMos = YUVinfo::allocateImage(Mwidth, Mheight);
- if (imgMos == NULL)
- {
-- LOGE("RunBlend: aborting - couldn't alloc %d x %d mosaic image", Mwidth, Mheight);
- return BLEND_RET_ERROR_MEMORY;
- }
-
-@@ -362,7 +352,6 @@ int Blend::FillFramePyramid(MosaicFrame *mb)
- !PyramidShort::BorderReduce(m_pFrameUPyr, m_wb.nlevsC) || !PyramidShort::BorderExpand(m_pFrameUPyr, m_wb.nlevsC, -1) ||
- !PyramidShort::BorderReduce(m_pFrameVPyr, m_wb.nlevsC) || !PyramidShort::BorderExpand(m_pFrameVPyr, m_wb.nlevsC, -1))
- {
-- LOGE("Error: Could not generate Laplacian pyramids");
- return BLEND_RET_ERROR;
- }
- else
-@@ -384,7 +373,6 @@ int Blend::DoMergeAndBlend(MosaicFrame **frames, int nsite,
- m_pMosaicVPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevsC,(unsigned short)rect.Width(),(unsigned short)rect.Height(),BORDER);
- if (!m_pMosaicYPyr || !m_pMosaicUPyr || !m_pMosaicVPyr)
- {
-- LOGE("Error: Could not allocate pyramids for blending");
- return BLEND_RET_ERROR_MEMORY;
- }
-
-@@ -579,6 +567,11 @@ int Blend::DoMergeAndBlend(MosaicFrame **frames, int nsite,
- // Blend
- PerformFinalBlending(imgMos, cropping_rect);
-
-+ if (cropping_rect.Width() <= 0 || cropping_rect.Height() <= 0)
-+ {
-+ return BLEND_RET_ERROR;
-+ }
-+
- if (m_pMosaicVPyr) free(m_pMosaicVPyr);
- if (m_pMosaicUPyr) free(m_pMosaicUPyr);
- if (m_pMosaicYPyr) free(m_pMosaicYPyr);
-@@ -632,7 +625,6 @@ int Blend::PerformFinalBlending(YUVinfo &imgMos, MosaicRect &cropping_rect)
- if (!PyramidShort::BorderExpand(m_pMosaicYPyr, m_wb.nlevs, 1) || !PyramidShort::BorderExpand(m_pMosaicUPyr, m_wb.nlevsC, 1) ||
- !PyramidShort::BorderExpand(m_pMosaicVPyr, m_wb.nlevsC, 1))
- {
-- LOGE("Error: Could not BorderExpand!");
- return BLEND_RET_ERROR;
- }
-
-@@ -785,18 +777,31 @@ int Blend::PerformFinalBlending(YUVinfo &imgMos, MosaicRect &cropping_rect)
- break;
- }
- }
-+
- }
-
-+ RoundingCroppingSizeToMultipleOf8(cropping_rect);
-+
- for(int j=0; j<imgMos.Y.height; j++)
- {
- delete b[j];
- }
-
-- delete b;
-+ delete[] b;
-
- return BLEND_RET_OK;
- }
-
-+void Blend::RoundingCroppingSizeToMultipleOf8(MosaicRect &rect) {
-+ int height = rect.bottom - rect.top + 1;
-+ int residue = height & 7;
-+ rect.bottom -= residue;
-+
-+ int width = rect.right - rect.left + 1;
-+ residue = width & 7;
-+ rect.right -= residue;
-+}
-+
- void Blend::ComputeMask(CSite *csite, BlendRect &vcrect, BlendRect &brect, MosaicRect &rect, YUVinfo &imgMos, int site_idx)
- {
- PyramidShort *dptr = m_pMosaicYPyr;
-diff --git a/jni/feature_mos/src/mosaic/Blend.h b/jni/feature_mos/src/mosaic/Blend.h
-index 6371fdeb..175eacd4 100644
---- a/jni/feature_mos/src/mosaic/Blend.h
-+++ b/jni/feature_mos/src/mosaic/Blend.h
-@@ -119,9 +119,10 @@ protected:
- void CropFinalMosaic(YUVinfo &imgMos, MosaicRect &cropping_rect);
-
- private:
-- static const float LIMIT_SIZE_MULTIPLIER = 5.0f * 2.0f;
-- static const float LIMIT_HEIGHT_MULTIPLIER = 2.5f;
-+ static const float LIMIT_SIZE_MULTIPLIER;
-+ static const float LIMIT_HEIGHT_MULTIPLIER;
- int MosaicSizeCheck(float sizeMultiplier, float heightMultiplier);
-+ void RoundingCroppingSizeToMultipleOf8(MosaicRect& rect);
- };
-
- #endif
-diff --git a/jni/feature_mos/src/mosaic/Delaunay.cpp b/jni/feature_mos/src/mosaic/Delaunay.cpp
-index 82f5d203..0ce09fc5 100644
---- a/jni/feature_mos/src/mosaic/Delaunay.cpp
-+++ b/jni/feature_mos/src/mosaic/Delaunay.cpp
-@@ -24,7 +24,7 @@
-
- #define QQ 9 // Optimal value as determined by testing
- #define DM 38 // 2^(1+DM/2) element sort capability. DM=38 for >10^6 elements
--#define NYL (-1)
-+#define NYL -1
- #define valid(l) ccw(orig(basel), dest(l), dest(basel))
-
-
-diff --git a/jni/feature_mos/src/mosaic/ImageUtils.cpp b/jni/feature_mos/src/mosaic/ImageUtils.cpp
-index 6d0aac0c..daa86060 100644
---- a/jni/feature_mos/src/mosaic/ImageUtils.cpp
-+++ b/jni/feature_mos/src/mosaic/ImageUtils.cpp
-@@ -283,7 +283,7 @@ ImageType ImageUtils::readBinaryPPM(const char *filename, int &width, int &heigh
-
- FILE *imgin = NULL;
- int mval=0, format=0, eret;
-- ImageType ret = IMAGE_TYPE_NOIMAGE;
-+ ImageType ret = NULL;//IMAGE_TYPE_NOIMAGE;
-
- imgin = fopen(filename, "r");
- if (imgin == NULL) {
-diff --git a/jni/feature_mos/src/mosaic/ImageUtils.h b/jni/feature_mos/src/mosaic/ImageUtils.h
-index 87782383..92965ca8 100644
---- a/jni/feature_mos/src/mosaic/ImageUtils.h
-+++ b/jni/feature_mos/src/mosaic/ImageUtils.h
-@@ -47,7 +47,7 @@ public:
- /**
- * Definition of an empty image.
- */
-- static const int IMAGE_TYPE_NOIMAGE = NULL;
-+ static const int IMAGE_TYPE_NOIMAGE = 0;
-
- /**
- * Convert image from BGR (interlaced) to YVU (non-interlaced)
-diff --git a/jni/feature_mos/src/mosaic/Log.h b/jni/feature_mos/src/mosaic/Log.h
-index cf6f14b1..2adfeda9 100644
---- a/jni/feature_mos/src/mosaic/Log.h
-+++ b/jni/feature_mos/src/mosaic/Log.h
-@@ -14,7 +14,7 @@
- * limitations under the License.
- */
- #ifndef LOG_H_
--#define LOG_H_
-+#define LOG_H_
-
- #include <android/log.h>
- #define LOGV(...) __android_log_print(ANDROID_LOG_SILENT, LOG_TAG, __VA_ARGS__)
-diff --git a/jni/feature_mos/src/mosaic/Mosaic.cpp b/jni/feature_mos/src/mosaic/Mosaic.cpp
-index f17c030b..4abc6f68 100644
---- a/jni/feature_mos/src/mosaic/Mosaic.cpp
-+++ b/jni/feature_mos/src/mosaic/Mosaic.cpp
-@@ -26,9 +26,6 @@
- #include "Mosaic.h"
- #include "trsMatrix.h"
-
--#include "Log.h"
--#define LOG_TAG "MOSAIC"
--
- Mosaic::Mosaic()
- {
- initialized = false;
-@@ -47,6 +44,10 @@ Mosaic::~Mosaic()
- delete frames;
- delete rframes;
-
-+ for (int j = 0; j < owned_size; j++)
-+ delete owned_frames[j];
-+ delete owned_frames;
-+
- if (aligner != NULL)
- delete aligner;
- if (blender != NULL)
-@@ -88,13 +89,10 @@ int Mosaic::initialize(int blendingType, int stripType, int width, int height, i
- {
- frames[i] = NULL;
- }
--
--
- }
-
-- LOGV("Initialize %d %d", width, height);
-- LOGV("Frame width %d,%d", width, height);
-- LOGV("Max num frames %d", max_frames);
-+ owned_frames = new ImageType[max_frames];
-+ owned_size = 0;
-
- aligner = new Align();
- aligner->initialize(width, height,quarter_res,thresh_still);
-@@ -107,7 +105,6 @@ int Mosaic::initialize(int blendingType, int stripType, int width, int height, i
- blender->initialize(blendingType, stripType, width, height);
- } else {
- blender = NULL;
-- LOGE("Error: Unknown blending type %d",blendingType);
- return MOSAIC_RET_ERROR;
- }
-
-@@ -123,7 +120,15 @@ int Mosaic::addFrameRGB(ImageType imageRGB)
- imageYVU = ImageUtils::allocateImage(this->width, this->height, ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
- ImageUtils::rgb2yvu(imageYVU, imageRGB, width, height);
-
-- return addFrame(imageYVU);
-+ int existing_frames_size = frames_size;
-+ int ret = addFrame(imageYVU);
-+
-+ if (frames_size > existing_frames_size)
-+ owned_frames[owned_size++] = imageYVU;
-+ else
-+ ImageUtils::freeImage(imageYVU);
-+
-+ return ret;
- }
-
- int Mosaic::addFrame(ImageType imageYVU)
-@@ -146,8 +151,6 @@ int Mosaic::addFrame(ImageType imageYVU)
-
- if (frames_size >= max_frames)
- {
-- LOGV("WARNING: More frames than preallocated, ignoring."
-- "Increase maximum number of frames (-f <max_frames>) to avoid this");
- return MOSAIC_RET_ERROR;
- }
-
-diff --git a/jni/feature_mos/src/mosaic/Mosaic.h b/jni/feature_mos/src/mosaic/Mosaic.h
-index fc6ecd90..9dea6642 100644
---- a/jni/feature_mos/src/mosaic/Mosaic.h
-+++ b/jni/feature_mos/src/mosaic/Mosaic.h
-@@ -181,6 +181,12 @@ protected:
- int frames_size;
- int max_frames;
-
-+ /**
-+ * Implicitly created frames, should be freed by Mosaic.
-+ */
-+ ImageType *owned_frames;
-+ int owned_size;
-+
- /**
- * Initialization state.
- */
-diff --git a/jni/feature_mos/src/mosaic/Pyramid.cpp b/jni/feature_mos/src/mosaic/Pyramid.cpp
-index 945eafba..b022d73d 100644
---- a/jni/feature_mos/src/mosaic/Pyramid.cpp
-+++ b/jni/feature_mos/src/mosaic/Pyramid.cpp
-@@ -154,24 +154,30 @@ void PyramidShort::BorderExpandOdd(PyramidShort *in, PyramidShort *out, PyramidS
- // Vertical Filter
- for (j = -off; j < in->height + off; j++) {
- int j2 = j * 2;
-- for (i = -scr->border; i < scr->width + scr->border; i++) {
-+ int limit = scr->width + scr->border;
-+ for (i = -scr->border; i < limit; i++) {
-+ int t1 = in->ptr[j][i];
-+ int t2 = in->ptr[j+1][i];
- scr->ptr[j2][i] = (short)
-- ((6 * in->ptr[j][i] + (in->ptr[j-1][i] + in->ptr[j+1][i]) + 4) >> 3);
-- scr->ptr[j2+1][i] = (short)((in->ptr[j][i] + in->ptr[j+1][i] + 1) >> 1);
-+ ((6 * t1 + (in->ptr[j-1][i] + t2) + 4) >> 3);
-+ scr->ptr[j2+1][i] = (short)((t1 + t2 + 1) >> 1);
- }
- }
-
- BorderSpread(scr, 0, 0, 3, 3);
-
- // Horizontal Filter
-- for (i = -off; i < scr->width + off; i++) {
-- int i2 = i * 2;
-- for (j = -out->border; j < out->height + out->border; j++) {
-+ int limit = out->height + out->border;
-+ for (j = -out->border; j < limit; j++) {
-+ for (i = -off; i < scr->width + off; i++) {
-+ int i2 = i * 2;
-+ int t1 = scr->ptr[j][i];
-+ int t2 = scr->ptr[j][i+1];
- out->ptr[j][i2] = (short) (out->ptr[j][i2] +
-- (mode * ((6 * scr->ptr[j][i] +
-- scr->ptr[j][i-1] + scr->ptr[j][i+1] + 4) >> 3)));
-+ (mode * ((6 * t1 +
-+ scr->ptr[j][i-1] + t2 + 4) >> 3)));
- out->ptr[j][i2+1] = (short) (out->ptr[j][i2+1] +
-- (mode * ((scr->ptr[j][i] + scr->ptr[j][i+1] + 1) >> 1)));
-+ (mode * ((t1 + t2 + 1) >> 1)));
- }
- }
-
-diff --git a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
-index 9a07e496..a956f23b 100755
---- a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
-+++ b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
-@@ -55,6 +55,8 @@ bool FrameBuffer::Init(int width, int height, GLenum format) {
- GL_TEXTURE_2D,
- mTextureName,
- 0);
-+ checkFramebufferStatus("FrameBuffer.cpp");
-+ checkGlError("framebuffertexture2d");
-
- if (!checkGlError("texture setup")) {
- return false;
-@@ -94,6 +96,3 @@ int FrameBuffer::GetWidth() const {
- int FrameBuffer::GetHeight() const {
- return mHeight;
- }
--
--
--
-diff --git a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h
-index b6a20ad1..314b1262 100755
---- a/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h
-+++ b/jni/feature_mos/src/mosaic_renderer/FrameBuffer.h
-@@ -4,7 +4,10 @@
- #include <GLES2/gl2.h>
- #include <GLES2/gl2ext.h>
-
--extern bool checkGlError(const char* op);
-+#define checkGlError(op) checkGLErrorDetail(__FILE__, __LINE__, (op))
-+
-+extern bool checkGLErrorDetail(const char* file, int line, const char* op);
-+extern void checkFramebufferStatus(const char* name);
-
- class FrameBuffer {
- public:
-diff --git a/jni/feature_mos/src/mosaic_renderer/Renderer.cpp b/jni/feature_mos/src/mosaic_renderer/Renderer.cpp
-index c5c143f9..b9938eb6 100755
---- a/jni/feature_mos/src/mosaic_renderer/Renderer.cpp
-+++ b/jni/feature_mos/src/mosaic_renderer/Renderer.cpp
-@@ -87,7 +87,7 @@ GLuint Renderer::createProgram(const char* pVertexSource, const char* pFragmentS
- GLint linkStatus = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
-
-- LOGI("Program Linked!");
-+ LOGI("Program Linked (%d)!", program);
-
- if (linkStatus != GL_TRUE)
- {
-diff --git a/jni/feature_stab/db_vlvm/db_utilities_constants.h b/jni/feature_stab/db_vlvm/db_utilities_constants.h
-index 612fc783..07565efd 100644
---- a/jni/feature_stab/db_vlvm/db_utilities_constants.h
-+++ b/jni/feature_stab/db_vlvm/db_utilities_constants.h
-@@ -64,7 +64,7 @@
- #define DB_DEFAULT_ABS_CORNER_THRESHOLD 50000000.0
- #define DB_DEFAULT_REL_CORNER_THRESHOLD 0.00005
- #define DB_DEFAULT_MAX_DISPARITY 0.1
--#define DB_DEFAULT_NO_DISPARITY (-1.0)
-+#define DB_DEFAULT_NO_DISPARITY -1.0
- #define DB_DEFAULT_MAX_TRACK_LENGTH 300
-
- #define DB_DEFAULT_MAX_NR_CAMERAS 1000
-diff --git a/jni/feature_stab/src/dbreg/dbreg.cpp b/jni/feature_stab/src/dbreg/dbreg.cpp
-index dc7d58fe..da06aa2a 100644
---- a/jni/feature_stab/src/dbreg/dbreg.cpp
-+++ b/jni/feature_stab/src/dbreg/dbreg.cpp
-@@ -485,7 +485,8 @@ int db_FrameToReferenceRegistration::AddFrame(const unsigned char * const * im,
- if(m_do_motion_smoothing)
- SmoothMotion();
-
-- db_PrintDoubleMatrix(m_H_ref_to_ins,3,3);
-+ // Disable debug printing
-+ // db_PrintDoubleMatrix(m_H_ref_to_ins,3,3);
-
- db_Copy9(H, m_H_ref_to_ins);
-
-diff --git a/jni/feature_stab/src/dbreg/dbstabsmooth.cpp b/jni/feature_stab/src/dbreg/dbstabsmooth.cpp
-index dffff8ab..2bb5d2e5 100644
---- a/jni/feature_stab/src/dbreg/dbstabsmooth.cpp
-+++ b/jni/feature_stab/src/dbreg/dbstabsmooth.cpp
-@@ -136,7 +136,7 @@ bool db_StabilizationSmoother::smoothMotionAdaptive(/*VP_BIMG *bimg,*/int hsize,
- smoothFactor = minSmoothFactor;
-
- // Find the amount of motion that must be compensated so that no "border" pixels are seen in the stable video
-- for (smoothFactor = smoothFactor; smoothFactor >= minSmoothFactor; smoothFactor -= 0.01) {
-+ for (; smoothFactor >= minSmoothFactor; smoothFactor -= 0.01) {
- // Compute the smoothed motion
- if(!smoothMotion(inmot, &tmpMotion, smoothFactor))
- break;
-diff --git a/jni/feature_stab/src/dbreg/vp_motionmodel.h b/jni/feature_stab/src/dbreg/vp_motionmodel.h
-index 71a7f7e7..a63ac001 100644
---- a/jni/feature_stab/src/dbreg/vp_motionmodel.h
-+++ b/jni/feature_stab/src/dbreg/vp_motionmodel.h
-@@ -120,7 +120,7 @@ enum VP_MOTION_MODEL {
- VP_VFE_AFFINE=120
- };
-
--#define VP_REFID (-1) /* Default ID used for reference frame */
-+#define VP_REFID -1 /* Default ID used for reference frame */
-
- typedef struct {
- VP_TRS par; /* Contains the motion paramerers.
-@@ -205,16 +205,16 @@ typedef struct {
- /* Warp a 2d point (assuming the z component is zero) */
- #define VP_WARP_POINT_2D(inx,iny,m,outx,outy) do {\
- VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWW(m); \
-- (outx) = (MXX(m)*((VP_PAR)(inx))+MXY(m)*((VP_PAR)(iny))+MXW(m))/vpTmpWarpPnt___; \
-- (outy) = (MYX(m)*((VP_PAR)(inx))+MYY(m)*((VP_PAR)(iny))+MYW(m))/vpTmpWarpPnt___; } while (0)
-+ outx = (MXX(m)*((VP_PAR)inx)+MXY(m)*((VP_PAR)iny)+MXW(m))/vpTmpWarpPnt___; \
-+ outy = (MYX(m)*((VP_PAR)inx)+MYY(m)*((VP_PAR)iny)+MYW(m))/vpTmpWarpPnt___; } while (0)
-
- /* Warp a 3d point */
- #define VP_WARP_POINT_3D(inx,iny,inz,m,outx,outy,outz) do {\
-- VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWZ(m)*((VP_PAR)(inz))+MWW(m); \
-- (outx) = (MXX(m)*((VP_PAR)(inx))+MXY(m)*((VP_PAR)(iny))+MXZ(m)*((VP_PAR)(inz))+MXW(m))/vpTmpWarpPnt___; \
-- (outy) = (MYX(m)*((VP_PAR)(inx))+MYY(m)*((VP_PAR)(iny))+MYZ(m)*((VP_PAR)(inz))+MYW(m))/vpTmpWarpPnt___; \
-- (outz) = MZX(m)*((VP_PAR)(inx))+MZY(m)*((VP_PAR)(iny))+MZZ(m)*((VP_PAR)(inz))+MZW(m); \
-- if ((m).type==VP_MOTION_PROJ_3D) (outz)/=vpTmpWarpPnt___; } while (0)
-+ VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWZ(m)*((VP_PAR)inz)+MWW(m); \
-+ outx = (MXX(m)*((VP_PAR)inx)+MXY(m)*((VP_PAR)iny)+MXZ(m)*((VP_PAR)inz)+MXW(m))/vpTmpWarpPnt___; \
-+ outy = (MYX(m)*((VP_PAR)inx)+MYY(m)*((VP_PAR)iny)+MYZ(m)*((VP_PAR)inz)+MYW(m))/vpTmpWarpPnt___; \
-+ outz = MZX(m)*((VP_PAR)inx)+MZY(m)*((VP_PAR)iny)+MZZ(m)*((VP_PAR)inz)+MZW(m); \
-+ if ((m).type==VP_MOTION_PROJ_3D) outz/=vpTmpWarpPnt___; } while (0)
-
- /* Projections of each component */
- #define VP_PROJW_3D(m,x,y,z,f) ( MWX(m)*(x)+MWY(m)*(y)+MWZ(m)*(z)+MWW(m) )
-@@ -224,15 +224,15 @@ typedef struct {
-
- /* Scale Down a matrix by Sfactor */
- #define VP_SCALEDOWN(m,Sfactor) do { \
-- MXW(m) /= (VP_PAR)(Sfactor); MWX(m) *= (VP_PAR)(Sfactor); \
-- MYW(m) /= (VP_PAR)(Sfactor); MWY(m) *= (VP_PAR)(Sfactor); \
-- MZW(m) /= (VP_PAR)(Sfactor); MWZ(m) *= (VP_PAR)(Sfactor); } while (0)
-+ MXW(m) /= (VP_PAR)Sfactor; MWX(m) *= (VP_PAR)Sfactor; \
-+ MYW(m) /= (VP_PAR)Sfactor; MWY(m) *= (VP_PAR)Sfactor; \
-+ MZW(m) /= (VP_PAR)Sfactor; MWZ(m) *= (VP_PAR)Sfactor; } while (0)
-
- /* Scale Up a matrix by Sfactor */
- #define VP_SCALEUP(m,Sfactor) do { \
-- MXW(m) *= (VP_PAR)(Sfactor); MWX(m) /= (VP_PAR)(Sfactor); \
-- MYW(m) *= (VP_PAR)(Sfactor); MWY(m) /= (VP_PAR)(Sfactor); \
-- MZW(m) *= (VP_PAR)(Sfactor); MWZ(m) /= (VP_PAR)(Sfactor); } while (0)
-+ MXW(m) *= (VP_PAR)Sfactor; MWX(m) /= (VP_PAR)Sfactor; \
-+ MYW(m) *= (VP_PAR)Sfactor; MWY(m) /= (VP_PAR)Sfactor; \
-+ MZW(m) *= (VP_PAR)Sfactor; MWZ(m) /= (VP_PAR)Sfactor; } while (0)
-
- /* Normalize the transformation matrix so that MWW is 1 */
- #define VP_NORMALIZE(m) if (MWW(m)!=(VP_PAR)0.0) do { \
-@@ -253,15 +253,15 @@ typedef struct {
-
- /* w' projection given a point x,y,0,f */
- #define VP_PROJZ(m,x,y,f) ( \
-- MWX(m)*((VP_PAR)(x))+MWY(m)*((VP_PAR)(y))+MWW(m)*((VP_PAR)(f)))
-+ MWX(m)*((VP_PAR)x)+MWY(m)*((VP_PAR)y)+MWW(m)*((VP_PAR)f))
-
- /* X Projection given a point x,y,0,f and w' */
- #define VP_PROJX(m,x,y,w,f) (\
-- (MXX(m)*((VP_PAR)(x))+MXY(m)*((VP_PAR)(y))+MXW(m)*((VP_PAR)(f)))/((VP_PAR)(w)))
-+ (MXX(m)*((VP_PAR)x)+MXY(m)*((VP_PAR)y)+MXW(m)*((VP_PAR)f))/((VP_PAR)w))
-
- /* Y Projection given a point x,y,0,f and the w' */
- #define VP_PROJY(m,x,y,w,f) (\
-- (MYX(m)*((VP_PAR)(x))+MYY(m)*((VP_PAR)(y))+MYW(m)*((VP_PAR)(f)))/((VP_PAR)(w)))
-+ (MYX(m)*((VP_PAR)x)+MYY(m)*((VP_PAR)y)+MYW(m)*((VP_PAR)f))/((VP_PAR)w))
-
- /* Set the reference id for a motion */
- #define VP_SET_REFID(m,id) do { (m).refid=id; } while (0)
diff --git a/android_bench_suite/panorama_input/test_001.ppm b/android_bench_suite/panorama_input/test_001.ppm
deleted file mode 100644
index e7218bf..0000000
--- a/android_bench_suite/panorama_input/test_001.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_002.ppm b/android_bench_suite/panorama_input/test_002.ppm
deleted file mode 100644
index 8975073..0000000
--- a/android_bench_suite/panorama_input/test_002.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_003.ppm b/android_bench_suite/panorama_input/test_003.ppm
deleted file mode 100644
index 58c9e34..0000000
--- a/android_bench_suite/panorama_input/test_003.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_004.ppm b/android_bench_suite/panorama_input/test_004.ppm
deleted file mode 100644
index 142c76b..0000000
--- a/android_bench_suite/panorama_input/test_004.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_005.ppm b/android_bench_suite/panorama_input/test_005.ppm
deleted file mode 100644
index ff229d3..0000000
--- a/android_bench_suite/panorama_input/test_005.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_006.ppm b/android_bench_suite/panorama_input/test_006.ppm
deleted file mode 100644
index 2fc5c09..0000000
--- a/android_bench_suite/panorama_input/test_006.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_007.ppm b/android_bench_suite/panorama_input/test_007.ppm
deleted file mode 100644
index d7f6a9a..0000000
--- a/android_bench_suite/panorama_input/test_007.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_008.ppm b/android_bench_suite/panorama_input/test_008.ppm
deleted file mode 100644
index 86d92b3..0000000
--- a/android_bench_suite/panorama_input/test_008.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_009.ppm b/android_bench_suite/panorama_input/test_009.ppm
deleted file mode 100644
index 72dd05f..0000000
--- a/android_bench_suite/panorama_input/test_009.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_010.ppm b/android_bench_suite/panorama_input/test_010.ppm
deleted file mode 100644
index a09a054..0000000
--- a/android_bench_suite/panorama_input/test_010.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_011.ppm b/android_bench_suite/panorama_input/test_011.ppm
deleted file mode 100644
index be7b61b..0000000
--- a/android_bench_suite/panorama_input/test_011.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_012.ppm b/android_bench_suite/panorama_input/test_012.ppm
deleted file mode 100644
index 67fad4a..0000000
--- a/android_bench_suite/panorama_input/test_012.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_013.ppm b/android_bench_suite/panorama_input/test_013.ppm
deleted file mode 100644
index 6d92fd1..0000000
--- a/android_bench_suite/panorama_input/test_013.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_014.ppm b/android_bench_suite/panorama_input/test_014.ppm
deleted file mode 100644
index 97aff41..0000000
--- a/android_bench_suite/panorama_input/test_014.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_015.ppm b/android_bench_suite/panorama_input/test_015.ppm
deleted file mode 100644
index d1de251..0000000
--- a/android_bench_suite/panorama_input/test_015.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_016.ppm b/android_bench_suite/panorama_input/test_016.ppm
deleted file mode 100644
index 70ea1f5..0000000
--- a/android_bench_suite/panorama_input/test_016.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_017.ppm b/android_bench_suite/panorama_input/test_017.ppm
deleted file mode 100644
index e075c9e..0000000
--- a/android_bench_suite/panorama_input/test_017.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_018.ppm b/android_bench_suite/panorama_input/test_018.ppm
deleted file mode 100644
index adf023b..0000000
--- a/android_bench_suite/panorama_input/test_018.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_019.ppm b/android_bench_suite/panorama_input/test_019.ppm
deleted file mode 100644
index 1f27d1d..0000000
--- a/android_bench_suite/panorama_input/test_019.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_020.ppm b/android_bench_suite/panorama_input/test_020.ppm
deleted file mode 100644
index fb95f52..0000000
--- a/android_bench_suite/panorama_input/test_020.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_021.ppm b/android_bench_suite/panorama_input/test_021.ppm
deleted file mode 100644
index 43baadf..0000000
--- a/android_bench_suite/panorama_input/test_021.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_022.ppm b/android_bench_suite/panorama_input/test_022.ppm
deleted file mode 100644
index f928c83..0000000
--- a/android_bench_suite/panorama_input/test_022.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_023.ppm b/android_bench_suite/panorama_input/test_023.ppm
deleted file mode 100644
index e21b275..0000000
--- a/android_bench_suite/panorama_input/test_023.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_024.ppm b/android_bench_suite/panorama_input/test_024.ppm
deleted file mode 100644
index 43ba0ba..0000000
--- a/android_bench_suite/panorama_input/test_024.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_025.ppm b/android_bench_suite/panorama_input/test_025.ppm
deleted file mode 100644
index b9f8892..0000000
--- a/android_bench_suite/panorama_input/test_025.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_026.ppm b/android_bench_suite/panorama_input/test_026.ppm
deleted file mode 100644
index 201615f..0000000
--- a/android_bench_suite/panorama_input/test_026.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_027.ppm b/android_bench_suite/panorama_input/test_027.ppm
deleted file mode 100644
index 07cf426..0000000
--- a/android_bench_suite/panorama_input/test_027.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_028.ppm b/android_bench_suite/panorama_input/test_028.ppm
deleted file mode 100644
index aedb023..0000000
--- a/android_bench_suite/panorama_input/test_028.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_029.ppm b/android_bench_suite/panorama_input/test_029.ppm
deleted file mode 100644
index 9a0d398..0000000
--- a/android_bench_suite/panorama_input/test_029.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_030.ppm b/android_bench_suite/panorama_input/test_030.ppm
deleted file mode 100644
index 26a8f53..0000000
--- a/android_bench_suite/panorama_input/test_030.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_031.ppm b/android_bench_suite/panorama_input/test_031.ppm
deleted file mode 100644
index 2300461..0000000
--- a/android_bench_suite/panorama_input/test_031.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_032.ppm b/android_bench_suite/panorama_input/test_032.ppm
deleted file mode 100644
index f5e93f8..0000000
--- a/android_bench_suite/panorama_input/test_032.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_033.ppm b/android_bench_suite/panorama_input/test_033.ppm
deleted file mode 100644
index c2f8ad9..0000000
--- a/android_bench_suite/panorama_input/test_033.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_034.ppm b/android_bench_suite/panorama_input/test_034.ppm
deleted file mode 100644
index de93b23..0000000
--- a/android_bench_suite/panorama_input/test_034.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_035.ppm b/android_bench_suite/panorama_input/test_035.ppm
deleted file mode 100644
index 62198de..0000000
--- a/android_bench_suite/panorama_input/test_035.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_036.ppm b/android_bench_suite/panorama_input/test_036.ppm
deleted file mode 100644
index bf252e4..0000000
--- a/android_bench_suite/panorama_input/test_036.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_037.ppm b/android_bench_suite/panorama_input/test_037.ppm
deleted file mode 100644
index 7cc7ace..0000000
--- a/android_bench_suite/panorama_input/test_037.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/panorama_input/test_038.ppm b/android_bench_suite/panorama_input/test_038.ppm
deleted file mode 100644
index d44e1f1..0000000
--- a/android_bench_suite/panorama_input/test_038.ppm
+++ /dev/null
Binary files differ
diff --git a/android_bench_suite/parse_result.py b/android_bench_suite/parse_result.py
deleted file mode 100644
index 90b3c4d..0000000
--- a/android_bench_suite/parse_result.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Helper functions to parse result collected from device"""
-
-from __future__ import print_function
-from fix_skia_results import _TransformBenchmarks
-
-import json
-
-def normalize(bench, dict_list):
- bench_base = {
- 'Panorama': 1,
- 'Dex2oat': 1,
- 'Hwui': 10000,
- 'Skia': 1,
- 'Synthmark': 1,
- 'Binder': 0.001
- }
- result_dict = dict_list[0]
- for key in result_dict:
- result_dict[key] = result_dict[key] / bench_base[bench]
- return [result_dict]
-
-
-# Functions to parse benchmark result for data collection.
-def parse_Panorama(bench, fin):
- result_dict = {}
- for line in fin:
- words = line.split()
- if 'elapsed' in words:
- #TODO: Need to restructure the embedded word counts.
- result_dict['total_time_s'] = float(words[3])
- result_dict['retval'] = 0
- return normalize(bench, [result_dict])
- raise ValueError('You passed the right type of thing, '
- 'but it didn\'t have the expected contents.')
-
-
-def parse_Synthmark(bench, fin):
- result_dict = {}
- accum = 0
- cnt = 0
- for line in fin:
- words = line.split()
- if 'normalized' in words:
- #TODO: Need to restructure the embedded word counts.
- accum += float(words[-1])
- cnt += 1
- if accum != 0:
- result_dict['total_voices'] = accum / cnt
- result_dict['retval'] = 0
- return normalize(bench, [result_dict])
- raise ValueError('You passed the right type of thing, '
- 'but it didn\'t have the expected contents.')
-
-
-def parse_Binder(bench, fin):
- result_dict = {}
- accum = 0
- cnt = 0
- for line in fin:
- words = line.split()
- for word in words:
- if 'average' in word:
- #TODO: Need to restructure the embedded word counts.
- accum += float(word[8:-2])
- cnt += 1
- if accum != 0:
- result_dict['avg_time_ms'] = accum / cnt
- result_dict['retval'] = 0
- return normalize(bench, [result_dict])
- raise ValueError('You passed the right type of thing, '
- 'but it didn\'t have the expected contents.')
-
-
-def parse_Dex2oat(bench, fin):
- result_dict = {}
- cnt = 0
- for line in fin:
- words = line.split()
- if 'elapsed' in words:
- cnt += 1
- #TODO: Need to restructure the embedded word counts.
- if cnt == 1:
- # First 'elapsed' time is for microbench 'Chrome'
- result_dict['chrome_s'] = float(words[3])
- elif cnt == 2:
- # Second 'elapsed' time is for microbench 'Camera'
- result_dict['camera_s'] = float(words[3])
-
- result_dict['retval'] = 0
- # Two results found, return
- return normalize(bench, [result_dict])
- raise ValueError('You passed the right type of thing, '
- 'but it didn\'t have the expected contents.')
-
-
-def parse_Hwui(bench, fin):
- result_dict = {}
- for line in fin:
- words = line.split()
- if 'elapsed' in words:
- #TODO: Need to restructure the embedded word counts.
- result_dict['total_time_s'] = float(words[3])
- result_dict['retval'] = 0
- return normalize(bench, [result_dict])
- raise ValueError('You passed the right type of thing, '
- 'but it didn\'t have the expected contents.')
-
-
-def parse_Skia(bench, fin):
- obj = json.load(fin)
- return normalize(bench, _TransformBenchmarks(obj))
diff --git a/android_bench_suite/run.py b/android_bench_suite/run.py
deleted file mode 100755
index 19d9b36..0000000
--- a/android_bench_suite/run.py
+++ /dev/null
@@ -1,481 +0,0 @@
-#!/usr/bin/env python2
-#
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-#
-# pylint: disable=cros-logging-import
-
-# This is the script to run a specified benchmark with different toolchain
-# settings. It covers building the benchmark locally and running the
-# benchmark on the DUT.
-
-"""Main script to run the benchmark suite from building to testing."""
-from __future__ import print_function
-
-import argparse
-import config
-import ConfigParser
-import logging
-import os
-import subprocess
-import sys
-
-logging.basicConfig(level=logging.INFO)
-
-def _parse_arguments(argv):
- parser = argparse.ArgumentParser(description='Build and run specific '
-                                   'benchmark')
- parser.add_argument(
- '-b',
- '--bench',
- action='append',
- default=[],
- help='Select which benchmark to run')
-
- # Only one of compiler directory and llvm prebuilts version can be indicated
-  # at the beginning, so set -c and -l into an exclusive group.
- group = parser.add_mutually_exclusive_group()
-
-  # The toolchain setting arguments have an action of 'append', so that users
-  # can compare performance with several toolchain settings together.
- group.add_argument(
- '-c',
- '--compiler_dir',
- metavar='DIR',
- action='append',
- default=[],
- help='Specify path to the compiler\'s bin directory. '
-      'You may give several paths, each with a -c, to '
-      'compare performance differences between '
-      'compilers.')
-
- parser.add_argument(
- '-o',
- '--build_os',
- action='append',
- default=[],
- help='Specify the host OS to build the benchmark.')
-
- group.add_argument(
- '-l',
- '--llvm_prebuilts_version',
- action='append',
- default=[],
- help='Specify the version of prebuilt LLVM. When '
-      'a specific prebuilt version of LLVM already '
-      'exists, there is no need to pass the path to '
-      'the compiler directory.')
-
- parser.add_argument(
- '-f',
- '--cflags',
- action='append',
- default=[],
- help='Specify the cflags options for the toolchain. '
- 'Be sure to quote all the cflags with quotation '
-      'marks ("") or use equals (=).')
- parser.add_argument(
- '--ldflags',
- action='append',
- default=[],
- help='Specify linker flags for the toolchain.')
-
- parser.add_argument(
- '-i',
- '--iterations',
- type=int,
- default=1,
-      help='Specify how many iterations the test '
-      'takes.')
-
- # Arguments -s and -r are for connecting to DUT.
- parser.add_argument(
- '-s',
- '--serials',
-      help='Comma-separated list of device serials under '
- 'test.')
-
- parser.add_argument(
- '-r',
- '--remote',
- default='localhost',
- help='hostname[:port] if the ADB device is connected '
- 'to a remote machine. Ensure this workstation '
- 'is configured for passwordless ssh access as '
- 'users "root" or "adb"')
-
- # Arguments -frequency and -m are for device settings
- parser.add_argument(
- '--frequency',
- type=int,
- default=960000,
- help='Specify the CPU frequency of the device. The '
-      'unit is kHz. The available values are defined in the '
-      'cpufreq/scaling_available_frequency file in '
-      'each core directory of the device. '
- 'The default value is 960000, which shows a '
- 'balance in noise and performance. Lower '
- 'frequency will slow down the performance but '
- 'reduce noise.')
-
- parser.add_argument(
- '-m',
- '--mode',
- default='little',
-      help='Specify whether to use \'little\' or \'big\' '
-      'mode. The default is little mode. '
-      'The little mode runs on a single core of '
-      'Cortex-A53, while big mode runs on a single core '
- 'of Cortex-A57.')
-
- # Configure file for benchmark test
- parser.add_argument(
- '-t',
- '--test',
- help='Specify the test settings with configuration '
- 'file.')
-
- # Whether to keep old json result or not
- parser.add_argument(
- '-k',
- '--keep',
- default='False',
- help='User can specify whether to keep the old json '
-      'results from the last run. This can be useful if you '
-      'want to compare performance differences in two or '
-      'more different runs. Default is False (off).')
-
- return parser.parse_args(argv)
-
-
-# Clear old log files in bench suite directory
-def clear_logs():
- logging.info('Removing old logfiles...')
- for f in ['build_log', 'device_log', 'test_log']:
- logfile = os.path.join(config.bench_suite_dir, f)
- try:
- os.remove(logfile)
- except OSError:
-      logging.info('No logfile %s needs to be removed. Ignored.', f)
-  logging.info('Old logfiles have been removed.')
-
-
-# Clear old json files in bench suite directory
-def clear_results():
- logging.info('Clearing old json results...')
- for bench in config.bench_list:
- result = os.path.join(config.bench_suite_dir, bench + '.json')
- try:
- os.remove(result)
- except OSError:
-      logging.info('No %s json file needs to be removed. Ignored.', bench)
-  logging.info('Old json results have been removed.')
-
-
-# Use subprocess.check_call to run other script, and put logs to files
-def check_call_with_log(cmd, log_file):
- log_file = os.path.join(config.bench_suite_dir, log_file)
- with open(log_file, 'a') as logfile:
- log_header = 'Log for command: %s\n' % (cmd)
- logfile.write(log_header)
- try:
- subprocess.check_call(cmd, stdout=logfile)
- except subprocess.CalledProcessError:
- logging.error('Error running %s, please check %s for more info.', cmd,
- log_file)
- raise
- logging.info('Logs for %s are written to %s.', cmd, log_file)
-
-
-def set_device(serials, remote, frequency):
- setting_cmd = [
- os.path.join(
- os.path.join(config.android_home, config.autotest_dir),
- 'site_utils/set_device.py')
- ]
- setting_cmd.append('-r=' + remote)
- setting_cmd.append('-q=' + str(frequency))
-
- # Deal with serials.
-  # If no serials are specified, try to run the test on the only device.
-  # If specified, split the serials into a list and run the test on each device.
- if serials:
- for serial in serials.split(','):
- setting_cmd.append('-s=' + serial)
- check_call_with_log(setting_cmd, 'device_log')
- setting_cmd.pop()
- else:
- check_call_with_log(setting_cmd, 'device_log')
-
- logging.info('CPU mode and frequency set successfully!')
-
-
-def log_ambiguous_args():
- logging.error('The count of arguments does not match!')
- raise ValueError('The count of arguments does not match.')
-
-
-# Check whether the counts of the build arguments are ambiguous. The
-# numbers of -c/-l, -f, and -o should be either all zero or all the same.
-def check_count(compiler, llvm_version, build_os, cflags, ldflags):
-  # Count will be set to 0 if no compiler or llvm_version is specified.
-  # Otherwise, one of these two lists will be empty and count will be
-  # the length of the other one.
- count = max(len(compiler), len(llvm_version))
-
-  # Check if the number of cflags is 0 or the same as before.
- if len(cflags) != 0:
- if count != 0 and len(cflags) != count:
- log_ambiguous_args()
- count = len(cflags)
-
- if len(ldflags) != 0:
- if count != 0 and len(ldflags) != count:
- log_ambiguous_args()
- count = len(ldflags)
-
- if len(build_os) != 0:
- if count != 0 and len(build_os) != count:
- log_ambiguous_args()
- count = len(build_os)
-
- # If no settings are passed, only run default once.
- return max(1, count)
-
-
-# Build benchmark binary with toolchain settings
-def build_bench(setting_no, bench, compiler, llvm_version, build_os, cflags,
- ldflags):
- # Build benchmark locally
- build_cmd = ['./build_bench.py', '-b=' + bench]
- if compiler:
- build_cmd.append('-c=' + compiler[setting_no])
- if llvm_version:
- build_cmd.append('-l=' + llvm_version[setting_no])
- if build_os:
- build_cmd.append('-o=' + build_os[setting_no])
- if cflags:
- build_cmd.append('-f=' + cflags[setting_no])
- if ldflags:
- build_cmd.append('--ldflags=' + ldflags[setting_no])
-
- logging.info('Building benchmark for toolchain setting No.%d...', setting_no)
- logging.info('Command: %s', build_cmd)
-
- try:
- subprocess.check_call(build_cmd)
- except:
- logging.error('Error while building benchmark!')
- raise
-
-
-def run_and_collect_result(test_cmd, setting_no, i, bench, serial='default'):
-
- # Run autotest script for benchmark on DUT
- check_call_with_log(test_cmd, 'test_log')
-
- logging.info('Benchmark with setting No.%d, iter.%d finished testing on '
- 'device %s.', setting_no, i, serial)
-
- # Rename results from the bench_result generated in autotest
- bench_result = os.path.join(config.bench_suite_dir, 'bench_result')
- if not os.path.exists(bench_result):
- logging.error('No result found at %s, '
- 'please check test_log for details.', bench_result)
- raise OSError('Result file %s not found.' % bench_result)
-
- new_bench_result = 'bench_result_%s_%s_%d_%d' % (bench, serial, setting_no, i)
- new_bench_result_path = os.path.join(config.bench_suite_dir, new_bench_result)
- try:
- os.rename(bench_result, new_bench_result_path)
- except OSError:
- logging.error('Error while renaming raw result %s to %s', bench_result,
- new_bench_result_path)
- raise
-
- logging.info('Benchmark result saved at %s.', new_bench_result_path)
-
-
-def test_bench(bench, setting_no, iterations, serials, remote, mode):
- logging.info('Start running benchmark on device...')
-
- # Run benchmark and tests on DUT
- for i in range(iterations):
- logging.info('Iteration No.%d:', i)
- test_cmd = [
- os.path.join(
- os.path.join(config.android_home, config.autotest_dir),
- 'site_utils/test_bench.py')
- ]
- test_cmd.append('-b=' + bench)
- test_cmd.append('-r=' + remote)
- test_cmd.append('-m=' + mode)
-
- # Deal with serials.
-    # If no serials are specified, try to run the test on the only device.
-    # If specified, split the serials into a list and run the test on each device.
- if serials:
- for serial in serials.split(','):
- test_cmd.append('-s=' + serial)
-
- run_and_collect_result(test_cmd, setting_no, i, bench, serial)
- test_cmd.pop()
- else:
- run_and_collect_result(test_cmd, setting_no, i, bench)
-
-
-def gen_json(bench, setting_no, iterations, serials):
- bench_result = os.path.join(config.bench_suite_dir, 'bench_result')
-
- logging.info('Generating JSON file for Crosperf...')
-
- if not serials:
- serials = 'default'
-
- for serial in serials.split(','):
-
- # Platform will be used as device lunch combo instead
- #experiment = '_'.join([serial, str(setting_no)])
- experiment = config.product_combo
-
- # Input format: bench_result_{bench}_{serial}_{setting_no}_
- input_file = '_'.join([bench_result, bench, serial, str(setting_no), ''])
- gen_json_cmd = [
- './gen_json.py', '--input=' + input_file,
- '--output=%s.json' % os.path.join(config.bench_suite_dir, bench),
- '--bench=' + bench, '--platform=' + experiment,
- '--iterations=' + str(iterations)
- ]
-
- logging.info('Command: %s', gen_json_cmd)
- if subprocess.call(gen_json_cmd):
-      logging.error('Error while generating JSON file, please check raw data '
- 'of the results at %s.', input_file)
-
-
-def gen_crosperf(infile, outfile):
- # Set environment variable for crosperf
- os.environ['PYTHONPATH'] = os.path.dirname(config.toolchain_utils)
-
- logging.info('Generating Crosperf Report...')
- crosperf_cmd = [
- os.path.join(config.toolchain_utils, 'generate_report.py'),
- '-i=' + infile, '-o=' + outfile, '-f'
- ]
-
- # Run crosperf generate_report.py
- logging.info('Command: %s', crosperf_cmd)
- subprocess.call(crosperf_cmd)
-
- logging.info('Report generated successfully!')
-  logging.info('Report Location: ' + outfile + '.html at bench '
- 'suite directory.')
-
-
-def main(argv):
-  # Set environment variable for the local location of the benchmark suite.
-  # This is for collecting testing results into the benchmark suite directory.
- os.environ['BENCH_SUITE_DIR'] = config.bench_suite_dir
-
-  # Set Android type, used to distinguish between aosp and internal builds.
- os.environ['ANDROID_TYPE'] = config.android_type
-
- # Set ANDROID_HOME for both building and testing.
- os.environ['ANDROID_HOME'] = config.android_home
-
- # Set environment variable for architecture, this will be used in
- # autotest.
- os.environ['PRODUCT'] = config.product
-
- arguments = _parse_arguments(argv)
-
- bench_list = arguments.bench
- if not bench_list:
- bench_list = config.bench_list
-
- compiler = arguments.compiler_dir
- build_os = arguments.build_os
- llvm_version = arguments.llvm_prebuilts_version
- cflags = arguments.cflags
- ldflags = arguments.ldflags
- iterations = arguments.iterations
- serials = arguments.serials
- remote = arguments.remote
- frequency = arguments.frequency
- mode = arguments.mode
- keep = arguments.keep
-
-  # Clear old logs every time before running the script
- clear_logs()
-
- if keep == 'False':
- clear_results()
-
- # Set test mode and frequency of CPU on the DUT
- set_device(serials, remote, frequency)
-
- test = arguments.test
- # if test configuration file has been given, use the build settings
- # in the configuration file and run the test.
- if test:
- test_config = ConfigParser.ConfigParser(allow_no_value=True)
- if not test_config.read(test):
- logging.error('Error while reading from building '
- 'configuration file %s.', test)
- raise RuntimeError('Error while reading configuration file %s.' % test)
-
- for setting_no, section in enumerate(test_config.sections()):
- bench = test_config.get(section, 'bench')
- compiler = [test_config.get(section, 'compiler')]
- build_os = [test_config.get(section, 'build_os')]
- llvm_version = [test_config.get(section, 'llvm_version')]
- cflags = [test_config.get(section, 'cflags')]
- ldflags = [test_config.get(section, 'ldflags')]
-
-      # Set iterations from the test_config file; if not set, use the one from
-      # the command line.
- it = test_config.get(section, 'iterations')
- if not it:
- it = iterations
- it = int(it)
-
- # Build benchmark for each single test configuration
- build_bench(0, bench, compiler, llvm_version, build_os, cflags, ldflags)
-
- test_bench(bench, setting_no, it, serials, remote, mode)
-
- gen_json(bench, setting_no, it, serials)
-
- for bench in config.bench_list:
- infile = os.path.join(config.bench_suite_dir, bench + '.json')
- if os.path.exists(infile):
- outfile = os.path.join(config.bench_suite_dir, bench + '_report')
- gen_crosperf(infile, outfile)
-
-  # Stop the script if only a config file was provided
- return 0
-
- # If no configuration file specified, continue running.
-  # Check whether the counts of the setting arguments are ambiguous.
- setting_count = check_count(compiler, llvm_version, build_os, cflags, ldflags)
-
- for bench in bench_list:
- logging.info('Start building and running benchmark: [%s]', bench)
-    # Run the script for each toolchain setting
- for setting_no in range(setting_count):
- build_bench(setting_no, bench, compiler, llvm_version, build_os, cflags,
- ldflags)
-
- # Run autotest script for benchmark test on device
- test_bench(bench, setting_no, iterations, serials, remote, mode)
-
- gen_json(bench, setting_no, iterations, serials)
-
- infile = os.path.join(config.bench_suite_dir, bench + '.json')
- outfile = os.path.join(config.bench_suite_dir, bench + '_report')
- gen_crosperf(infile, outfile)
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/android_bench_suite/set_flags.py b/android_bench_suite/set_flags.py
deleted file mode 100644
index a243c7c..0000000
--- a/android_bench_suite/set_flags.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-"""Helper functions to put user defined flags to mk/bp files"""
-
-from __future__ import print_function
-
-import config
-import os
-import subprocess
-
-
-# Find the makefile/blueprint based on the benchmark, and make a copy of
-# it for restoring later.
-def backup_file(bench, file_type):
- mk_file = os.path.join(config.android_home, config.bench_dict[bench],
- 'Android.' + file_type)
- try:
- # Make a copy of the makefile/blueprint so that we can recover it after
- # building the benchmark
- subprocess.check_call([
- 'cp', mk_file,
- os.path.join(config.android_home, config.bench_dict[bench],
- 'tmp_makefile')
- ])
-  except subprocess.CalledProcessError:
- raise OSError('Cannot backup Android.%s file for %s' % (file_type, bench))
-
-
-# Insert lines to add LOCAL_CFLAGS/LOCAL_LDFLAGS to the benchmark's
-# makefile/blueprint
-def replace_flags(bench, android_type, file_type, cflags, ldflags):
- # Use format ["Flag1", "Flag2"] for bp file
- if file_type == 'bp':
- if cflags:
- cflags = '\", \"'.join(cflags.split())
- if ldflags:
- ldflags = '\", \"'.join(ldflags.split())
-
- if not cflags:
- cflags = ''
- else:
- cflags = '\"' + cflags + '\",'
- if not ldflags:
- ldflags = ''
- else:
- ldflags = '\"' + ldflags + '\",'
-
- # Two different diffs are used for aosp or internal android repo.
- if android_type == 'aosp':
- bench_diff = bench + '_flags_aosp.diff'
- else:
- bench_diff = bench + '_flags_internal.diff'
-
- # Replace CFLAGS_FOR_BENCH_SUITE marker with proper cflags
- output = ''
- with open(bench_diff) as f:
- for line in f:
- line = line.replace('CFLAGS_FOR_BENCH_SUITE', cflags)
- line = line.replace('LDFLAGS_FOR_BENCH_SUITE', ldflags)
- output += line
-
- with open('modified.diff', 'w') as f:
- f.write(output)
-
-
-def apply_patches(bench):
- bench_dir = os.path.join(config.android_home, config.bench_dict[bench])
- bench_diff = 'modified.diff'
- flags_patch = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), bench_diff)
- try:
- subprocess.check_call(['git', '-C', bench_dir, 'apply', flags_patch])
- except subprocess.CalledProcessError:
- raise OSError('Patch for adding flags for %s does not succeed.' % (bench))
-
-
-def replace_flags_in_dir(bench, cflags, ldflags):
- bench_mk = os.path.join(config.android_home, config.bench_dict[bench],
- 'Android.mk')
-
- if not cflags:
- cflags = ''
- if not ldflags:
- ldflags = ''
-
- output = ''
- with open(bench_mk) as f:
- for line in f:
- line = line.replace('$(CFLAGS_FOR_BENCH_SUITE)', cflags)
- line = line.replace('$(LDFLAGS_FOR_BENCH_SUITE)', ldflags)
- output += line
- with open(bench_mk, 'w') as f:
- f.write(output)
-
-
-def add_flags_Panorama(cflags, ldflags):
- backup_file('Panorama', 'mk')
- replace_flags_in_dir('Panorama', cflags, ldflags)
-
-
-def add_flags_Synthmark(cflags, ldflags):
- backup_file('Synthmark', 'mk')
- replace_flags_in_dir('Synthmark', cflags, ldflags)
-
-
-def add_flags_Skia(cflags, ldflags):
- backup_file('Skia', 'bp')
- replace_flags('Skia', config.android_type, 'bp', cflags, ldflags)
- apply_patches('Skia')
-
-
-def add_flags_Binder(cflags, ldflags):
- backup_file('Binder', 'bp')
- replace_flags('Binder', config.android_type, 'bp', cflags, ldflags)
- apply_patches('Binder')
-
-
-def add_flags_Hwui(cflags, ldflags):
- backup_file('Hwui', 'bp')
- replace_flags('Hwui', config.android_type, 'bp', cflags, ldflags)
- apply_patches('Hwui')
-
-
-def add_flags_Dex2oat(cflags, ldflags):
- backup_file('Dex2oat', 'bp')
- replace_flags('Dex2oat', config.android_type, 'bp', cflags, ldflags)
- apply_patches('Dex2oat')
diff --git a/android_bench_suite/skia_aosp.diff b/android_bench_suite/skia_aosp.diff
deleted file mode 100644
index 269e02a..0000000
--- a/android_bench_suite/skia_aosp.diff
+++ /dev/null
@@ -1,62 +0,0 @@
-diff --git a/bench/ResultsWriter.h b/bench/ResultsWriter.h
-index f56deae..69a84c7 100644
---- a/bench/ResultsWriter.h
-+++ b/bench/ResultsWriter.h
-@@ -46,6 +46,9 @@ public:
- // Record a single test metric.
- virtual void metric(const char name[], double ms) {}
-
-+ // Record a list of test metrics.
-+ virtual void metrics(const char name[], const SkTArray<double> &array) {}
-+
- // Flush to storage now please.
- virtual void flush() {}
- };
-@@ -113,6 +116,17 @@ public:
- SkASSERT(fConfig);
- (*fConfig)[name] = ms;
- }
-+ void metrics(const char name[], const SkTArray<double> &array) override {
-+ // The user who wrote this feature prefers NaNs over not having results.
-+ // Hence, this ignores whether we have NaNs.
-+ SkASSERT(fConfig);
-+ Json::Value value = Json::Value(Json::arrayValue);
-+ value.resize(array.count());
-+ for (unsigned i = 0, e = array.count(); i != e; ++i) {
-+ value[i] = array[i];
-+ }
-+ (*fConfig)[name] = value;
-+ }
-
- // Flush to storage now please.
- void flush() override {
-diff --git a/bench/nanobench.cpp b/bench/nanobench.cpp
-index ae415fa..22011cd 100644
---- a/bench/nanobench.cpp
-+++ b/bench/nanobench.cpp
-@@ -42,6 +42,7 @@
- #include "SkSurface.h"
- #include "SkTaskGroup.h"
- #include "SkThreadUtils.h"
-+#include "SkTypes.h"
- #include "ThermalManager.h"
-
- #include <stdlib.h>
-@@ -1173,7 +1174,7 @@ int nanobench_main() {
- target->setup();
- bench->perCanvasPreDraw(canvas);
-
-- int maxFrameLag;
-+ int maxFrameLag = 0;
- int loops = target->needsFrameTiming(&maxFrameLag)
- ? setup_gpu_bench(target, bench.get(), maxFrameLag)
- : setup_cpu_bench(overhead, target, bench.get());
-@@ -1197,6 +1198,7 @@ int nanobench_main() {
- benchStream.fillCurrentOptions(log.get());
- target->fillOptions(log.get());
- log->metric("min_ms", stats.min);
- log->metric("median_ms", stats.median);
-+ log->metrics("samples", samples);
- #if SK_SUPPORT_GPU
- if (gpuStatsDump) {
- // dump to json, only SKPBench currently returns valid keys / values
diff --git a/android_bench_suite/skia_internal.diff b/android_bench_suite/skia_internal.diff
deleted file mode 100644
index f6b1961..0000000
--- a/android_bench_suite/skia_internal.diff
+++ /dev/null
@@ -1,61 +0,0 @@
-diff --git a/bench/ResultsWriter.h b/bench/ResultsWriter.h
-index f56deae..69a84c7 100644
---- a/bench/ResultsWriter.h
-+++ b/bench/ResultsWriter.h
-@@ -46,6 +46,9 @@ public:
- // Record a single test metric.
- virtual void metric(const char name[], double ms) {}
-
-+ // Record a list of test metrics.
-+ virtual void metrics(const char name[], const SkTArray<double> &array) {}
-+
- // Flush to storage now please.
- virtual void flush() {}
- };
-@@ -113,6 +116,17 @@ public:
- SkASSERT(fConfig);
- (*fConfig)[name] = ms;
- }
-+ void metrics(const char name[], const SkTArray<double> &array) override {
-+ // The user who wrote this feature prefers NaNs over not having results.
-+ // Hence, this ignores whether we have NaNs.
-+ SkASSERT(fConfig);
-+ Json::Value value = Json::Value(Json::arrayValue);
-+ value.resize(array.count());
-+ for (unsigned i = 0, e = array.count(); i != e; ++i) {
-+ value[i] = array[i];
-+ }
-+ (*fConfig)[name] = value;
-+ }
-
- // Flush to storage now please.
- void flush() override {
-diff --git a/bench/nanobench.cpp b/bench/nanobench.cpp
-index 0651302..0623d61 100644
---- a/bench/nanobench.cpp
-+++ b/bench/nanobench.cpp
-@@ -43,6 +43,7 @@
- #include "SkSVGDOM.h"
- #include "SkTaskGroup.h"
- #include "SkThreadUtils.h"
-+#include "SkTypes.h"
- #include "ThermalManager.h"
- #include "SkScan.h"
-
-@@ -1240,7 +1241,7 @@ int nanobench_main() {
- target->setup();
- bench->perCanvasPreDraw(canvas);
-
-- int maxFrameLag;
-+ int maxFrameLag = 0;
- int loops = target->needsFrameTiming(&maxFrameLag)
- ? setup_gpu_bench(target, bench.get(), maxFrameLag)
- : setup_cpu_bench(overhead, target, bench.get());
-@@ -1290,6 +1291,7 @@ int nanobench_main() {
- benchStream.fillCurrentOptions(log.get());
- target->fillOptions(log.get());
- log->metric("min_ms", stats.min);
-+ log->metrics("samples", samples);
- #if SK_SUPPORT_GPU
- if (gpuStatsDump) {
- // dump to json, only SKPBench currently returns valid keys / values
diff --git a/android_bench_suite/synthmark.diff b/android_bench_suite/synthmark.diff
deleted file mode 100644
index db87cb9..0000000
--- a/android_bench_suite/synthmark.diff
+++ /dev/null
@@ -1,22 +0,0 @@
-diff --git a/Android.mk b/Android.mk
-index e1d89db..3970857 100644
---- a/Android.mk
-+++ b/Android.mk
-@@ -1 +1,16 @@
--# This file is intentionally empty, to prevent a platform build from descending further
-+# Copyright 2017 The Chromium OS Authors. All rights reserved.
-+# Use of this source code is governed by a BSD-style license that can be
-+# found in the LICENSE file.
-+
-+LOCAL_PATH := $(call my-dir)
-+
-+include $(CLEAR_VARS)
-+LOCAL_MODULE_TAGS := tests
-+LOCAL_C_INCLUDES := $(LOCAL_PATH)/source
-+LOCAL_SRC_FILES:= apps/synthmark.cpp
-+LOCAL_CFLAGS += -g -std=c++11 -Ofast
-+LOCAL_CFLAGS += $(CFLAGS_FOR_BENCH_SUITE)
-+LOCAL_LDFLAGS += $(LDFLAGS_FOR_BENCH_SUITE)
-+#LOCAL_SHARED_LIBRARIES := libcutils libutils
-+LOCAL_MODULE := synthmark
-+include $(BUILD_EXECUTABLE)
diff --git a/android_bench_suite/test_config b/android_bench_suite/test_config
deleted file mode 100644
index ae2cff2..0000000
--- a/android_bench_suite/test_config
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright 2017 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-[Hwui_Test]
-bench = Hwui
-compiler =
-build_os =
-llvm_version =
-cflags =
-ldflags =
-iterations = 1
-
-[Skia_Test]
-bench = Skia
-compiler =
-build_os =
-llvm_version =
-cflags =
-ldflags =
-iterations = 1
-
-[Synthmark_Test]
-bench = Synthmark
-compiler =
-build_os =
-llvm_version =
-cflags =
-ldflags =
-iterations = 1
-
-[Binder_Test]
-bench = Binder
-compiler =
-build_os =
-llvm_version =
-cflags =
-ldflags =
-iterations = 1
-
-[Panorama_Test]
-bench = Panorama
-compiler =
-build_os =
-llvm_version =
-cflags =
-ldflags =
-iterations = 1
-
-[Dex2oat_Test]
-bench = Dex2oat
-compiler =
-build_os =
-llvm_version =
-cflags =
-ldflags =
-iterations = 1
diff --git a/android_merge_from_upstream.sh b/android_merge_from_upstream.sh
index b63eb6e..4716a25 100755
--- a/android_merge_from_upstream.sh
+++ b/android_merge_from_upstream.sh
@@ -11,9 +11,9 @@
# https://android-review.googlesource.com/c/platform/external/toolchain-utils/+/1132504/1
local_branch_name="merge_with_upstream"
-local_upstream="aosp/master"
+local_upstream="aosp/master" # nocheck
remote="aosp"
-remote_branch="${remote}/upstream-mirror-master"
+remote_branch="${remote}/upstream-main" # nocheck
my_dir="$(dirname "$(readlink -m "$0")")"
cd "${my_dir}"
@@ -21,7 +21,7 @@
ensure_head_is_upstream_main() {
local current_rev main_rev
current_rev="$(git rev-parse HEAD)"
- main_rev="$(git rev-parse ${local_upstream})"
+ main_rev="$(git rev-parse "${local_upstream}")"
if [[ "${current_rev}" != "${main_rev}" ]]; then
echo "Please checkout ${local_upstream} and rerun this" >&2
exit
@@ -49,7 +49,7 @@
get_merge_commit_list() {
local merge_base
- merge_base="$(git merge-base HEAD ${remote_branch})"
+ merge_base="$(git merge-base HEAD "${remote_branch}")"
git log --oneline "${merge_base}..${remote_branch}"
}
@@ -62,6 +62,9 @@
commit_list="$(get_merge_commit_list)"
num_commits="$(wc -l <<< "${commit_list}")"
+
+# Disable shellcheck for the sed substitution warning.
+# shellcheck disable=SC2001
commit_message="Merging ${num_commits} commit(s) from Chromium's toolchain-utils
Merged commit digest:
@@ -69,6 +72,6 @@
"
git merge "${remote_branch}" -m "${commit_message}"
-echo 'NOTE: When you try to `repo upload`, repo might show a scary warning'
+echo 'NOTE: When you try to "repo upload", repo might show a scary warning'
echo 'about the number of changes are being uploaded. That should be fine,'
echo 'since repo will only create CLs for commits not known to our remote.'
diff --git a/binary_search_tool/README.bisect.md b/binary_search_tool/README.bisect.md
index bd9e0f1..32f4cba 100644
--- a/binary_search_tool/README.bisect.md
+++ b/binary_search_tool/README.bisect.md
@@ -75,7 +75,9 @@
* board: The board to bisect on. For example: daisy, falco, etc.
* remote: The IP address of the physical machine you're using to test with.
-* package: The package to bisect with. For example: chromeos-chrome
+* package: The package to bisect with. For example: chromeos-chrome.
+* use_flags: (Optional) USE flags for emerge. For example: "-thinlto -cfi".
+* noreboot: (Optional) Do not reboot after updating the package.
* dir: (Optional) the directory for your good/bad build trees. Defaults to
$BISECT_DIR or /tmp/sysroot_bisect. This value will set $BISECT_DIR
for all bisecting scripts.
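For illustration, the `use_flags` and `noreboot` options above map onto the
`--use_flags` and `--noreboot` arguments this change adds to `run_bisect.py`.
A hypothetical invocation (the `package` mode name, board, and remote address
below are assumed examples, not values taken from this patch):
```
./run_bisect.py package daisy 192.168.1.2 chromeos-chrome \
    --use_flags="-thinlto -cfi" --noreboot
```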
diff --git a/binary_search_tool/common/test_setup.sh b/binary_search_tool/common/test_setup.sh
index 4fa1abe..3ea7327 100755
--- a/binary_search_tool/common/test_setup.sh
+++ b/binary_search_tool/common/test_setup.sh
@@ -1,6 +1,8 @@
#!/bin/bash
#
-# Copyright 2016 Google Inc. All Rights Reserved.
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
#
# This is a generic ChromeOS package/image test setup script. It is meant to
# be used for either the object file or package bisection tools. This script
@@ -94,12 +96,29 @@
if [[ "${BISECT_MODE}" == "OBJECT_MODE" ]]; then
echo "EMERGING ${BISECT_PACKAGE}"
- echo "sudo rm -rf /build/${BISECT_BOARD}/var/cache/portage/*"
+ set -x
sudo rm -rf /build/${BISECT_BOARD}/var/cache/portage/*
- echo "sudo rm -rf /build/${BISECT_BOARD}/tmp/portage/${BISECT_PACKAGE}*"
sudo rm -rf /build/${BISECT_BOARD}/tmp/portage/${BISECT_PACKAGE}*
+ set +x
+ if [[ ${BISECT_PACKAGE} == *chromeos-chrome ]]; then
+ if [[ ${BISECT_USE_FLAGS} == *chrome_internal* && \
+ ${BISECT_USE_FLAGS} != *-chrome_internal* ]]; then
+        # Split across two lines to satisfy the pre-upload line-length check.
+ chrome_build_dir="/var/cache/chromeos-chrome/chrome-src-internal/src/"
+ chrome_build_dir+="out_${BISECT_BOARD}"
+ else
+        # Split across two lines to satisfy the pre-upload line-length check.
+ chrome_build_dir="/var/cache/chromeos-chrome/chrome-src/src/"
+ chrome_build_dir+="out_${BISECT_BOARD}"
+ fi
+ set -x
+ sudo rm -rf ${chrome_build_dir}
+ set +x
+ fi
+ set -x
CLEAN_DELAY=0 emerge-${BISECT_BOARD} -C ${BISECT_PACKAGE}
- emerge-${BISECT_BOARD} ${BISECT_PACKAGE}
+ USE="${BISECT_USE_FLAGS}" emerge-${BISECT_BOARD} ${BISECT_PACKAGE}
+ set +x
emerge_status=$?
if [[ ${emerge_status} -ne 0 ]] ; then
@@ -108,20 +127,43 @@
fi
echo
- echo "DEPLOYING"
+ echo "DEPLOYING TO ${BISECT_REMOTE}"
- if [[ ${BISECT_PACKAGE} == sys-kernel/chromeos-kernel-* ]]; then
- echo "/mnt/host/source/src/scripts/update_kernel.sh " \
- "--remote=${BISECT_REMOTE} --board=${BISECT_BOARD}"
+ if [[ ${BISECT_PACKAGE} == *chromeos-kernel-* ]]; then
+ cmd="/mnt/host/source/src/scripts/update_kernel.sh --board=${BISECT_BOARD} --remote=${BISECT_REMOTE}"
+ if [[ ${BISECT_REMOTE} == *:* ]]; then
+    IP=$(echo ${BISECT_REMOTE} | cut -d ":" -f1)
+    PORT=$(echo ${BISECT_REMOTE} | cut -d ":" -f2)
+ cmd="/mnt/host/source/src/scripts/update_kernel.sh --board=${BISECT_BOARD} --remote=${IP} --ssh_port=${PORT}"
+ fi
+ if [[ ${BISECT_REBOOT_OPTION} == false ]]; then
+ cmd+=" --noreboot"
+ fi
+ set -x
# exec the command to make sure it always exit after
- exec /mnt/host/source/src/scripts/update_kernel.sh --remote=${BISECT_REMOTE} --board=${BISECT_BOARD}
+ exec $cmd
+ set +x
fi
- echo "cros deploy ${BISECT_REMOTE} ${BISECT_PACKAGE}"
- cros deploy ${BISECT_REMOTE} ${BISECT_PACKAGE} --log-level=info
-
+ if [[ ${BISECT_PACKAGE} == *chromeos-chrome ]]; then
+ # deploy_chrome needs to run inside chrome source tree
+ pushd ~/chrome_root
+ set -x
+ deploy_chrome --force --build-dir=${chrome_build_dir}/Release \
+ --device=${BISECT_REMOTE}
+ set +x
+ popd
+ else
+ set -x
+ cros deploy ${BISECT_REMOTE} ${BISECT_PACKAGE} --log-level=info
+ set +x
+ fi
deploy_status=$?
+ if [[ ${BISECT_REBOOT_OPTION} == false ]]; then
+ exit 0
+ fi
+
if [[ ${deploy_status} -eq 0 ]] ; then
echo "Deploy successful. Rebooting device..."
reboot
@@ -139,7 +181,8 @@
echo "BUILDING IMAGE"
pushd ~/trunk/src/scripts
-./build_image test --board=${BISECT_BOARD} --noenable_rootfs_verification --noeclean
+USE="${BISECT_USE_FLAGS}" ./build_image test --board=${BISECT_BOARD} \
+ --noenable_rootfs_verification --noeclean
build_status=$?
popd
diff --git a/binary_search_tool/run_bisect.py b/binary_search_tool/run_bisect.py
index ef1048b..249b9cf 100755
--- a/binary_search_tool/run_bisect.py
+++ b/binary_search_tool/run_bisect.py
@@ -12,6 +12,7 @@
import argparse
from argparse import RawTextHelpFormatter
import os
+import shlex
import sys
from binary_search_tool import binary_search_state
@@ -141,8 +142,8 @@
'prune': True,
'file_args': True
}
- self.setup_cmd = ('%s %s %s' % (self.cros_pkg_setup, self.options.board,
- self.options.remote))
+ self.setup_cmd = ' '.join(
+ (self.cros_pkg_setup, self.options.board, self.options.remote))
self.ArgOverride(self.default_kwargs, self.overrides)
def PreRun(self):
@@ -192,9 +193,10 @@
if options.dir:
os.environ['BISECT_DIR'] = options.dir
self.options.dir = os.environ.get('BISECT_DIR', '/tmp/sysroot_bisect')
- self.setup_cmd = (
- '%s %s %s %s' % (self.sysroot_wrapper_setup, self.options.board,
- self.options.remote, self.options.package))
+ self.setup_cmd = ' '.join(
+ (self.sysroot_wrapper_setup, self.options.board, self.options.remote,
+ self.options.package, str(self.options.reboot).lower(),
+ shlex.quote(self.options.use_flags)))
self.ArgOverride(self.default_kwargs, overrides)
@@ -253,8 +255,8 @@
if self.options.device_id:
device_id = "ANDROID_SERIAL='%s'" % self.options.device_id
- self.setup_cmd = ('%s %s %s %s' % (num_jobs, device_id, self.android_setup,
- self.options.android_src))
+ self.setup_cmd = ' '.join(
+ (num_jobs, device_id, self.android_setup, self.options.android_src))
self.ArgOverride(self.default_kwargs, overrides)
@@ -344,6 +346,16 @@
parser_object.add_argument('remote', help='Remote machine to test on')
parser_object.add_argument('package', help='Package to emerge and test')
parser_object.add_argument(
+ '--use_flags',
+ required=False,
+ default='',
+      help='USE flags passed to emerge')
+ parser_object.add_argument(
+ '--noreboot',
+ action='store_false',
+ dest='reboot',
+ help='Do not reboot after updating the package (default: False)')
+ parser_object.add_argument(
'--dir',
help=('Bisection directory to use, sets '
'$BISECT_DIR if provided. Defaults to '
diff --git a/binary_search_tool/sysroot_wrapper/README.md b/binary_search_tool/sysroot_wrapper/README.md
index 89904a0..77ce4b8 100644
--- a/binary_search_tool/sysroot_wrapper/README.md
+++ b/binary_search_tool/sysroot_wrapper/README.md
@@ -8,7 +8,7 @@
Before running the binary searcher tool you will need to run the setup script:
```
-./sysroot_wrapper/setup.sh ${board} ${remote_ip} ${package}
+./sysroot_wrapper/setup.sh ${board} ${remote_ip} ${package} ${reboot_option} ${use_flags}
```
This setup script will ensure your `$BISECT_DIR` is properly populated and
diff --git a/binary_search_tool/sysroot_wrapper/setup.sh b/binary_search_tool/sysroot_wrapper/setup.sh
index f5907f5..6b9b48f 100755
--- a/binary_search_tool/sysroot_wrapper/setup.sh
+++ b/binary_search_tool/sysroot_wrapper/setup.sh
@@ -1,6 +1,8 @@
#!/bin/bash -u
#
-# Copyright 2016 Google Inc. All Rights Reserved.
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
#
# This script is part of the ChromeOS object binary search triage process.
# It should be the first script called by the user, after the user has set up
@@ -23,6 +25,8 @@
BOARD=$1
REMOTE=$2
PACKAGE=$3
+REBOOT_OPTION=$4
+USE_FLAGS=$5
GOOD_BUILD=${bisect_dir}/good
BAD_BUILD=${bisect_dir}/bad
@@ -60,6 +64,8 @@
BISECT_BOARD=${BOARD}
BISECT_REMOTE=${REMOTE}
BISECT_PACKAGE=${PACKAGE}
+BISECT_REBOOT_OPTION=${REBOOT_OPTION}
+BISECT_USE_FLAGS="${USE_FLAGS}"
BISECT_MODE="OBJECT_MODE"
bisect_dir=${bisect_dir}
diff --git a/buildbot_test_llvm.py b/buildbot_test_llvm.py
index 968c67b..1c7bb19 100755
--- a/buildbot_test_llvm.py
+++ b/buildbot_test_llvm.py
@@ -32,7 +32,7 @@
CROSTC_ROOT = '/usr/local/google/crostc'
ROLE_ACCOUNT = 'mobiletc-prebuild'
TOOLCHAIN_DIR = os.path.dirname(os.path.realpath(__file__))
-MAIL_PROGRAM = '~/var/bin/mail-sheriff'
+MAIL_PROGRAM = '~/var/bin/mail-detective'
VALIDATION_RESULT_DIR = os.path.join(CROSTC_ROOT, 'validation_result')
START_DATE = datetime.date(2016, 1, 1)
TEST_PER_DAY = 4
@@ -137,24 +137,23 @@
# Common initializations
command_executer.InitCommandExecuter()
parser = argparse.ArgumentParser()
- parser.add_argument(
- '--chromeos_root',
- dest='chromeos_root',
- help='The chromeos root from which to run tests.')
- parser.add_argument(
- '--weekday',
- default='',
- dest='weekday',
- help='The day of the week for which to run tests.')
- parser.add_argument(
- '--board', default='', dest='board', help='The board to test.')
- parser.add_argument(
- '--patch',
- dest='patches',
- default='',
- help='The patches to use for the testing, '
- "seprate the patch numbers with ',' "
- 'for more than one patches.')
+ parser.add_argument('--chromeos_root',
+ dest='chromeos_root',
+ help='The chromeos root from which to run tests.')
+ parser.add_argument('--weekday',
+ default='',
+ dest='weekday',
+ help='The day of the week for which to run tests.')
+ parser.add_argument('--board',
+ default='',
+ dest='board',
+ help='The board to test.')
+ parser.add_argument('--patch',
+ dest='patches',
+ default='',
+ help='The patches to use for the testing, '
+                      "separate the patch numbers with ',' "
+                      'for more than one patch.')
parser.add_argument(
'--compiler',
dest='compiler',
diff --git a/buildbot_test_toolchains.py b/buildbot_test_toolchains.py
index b546894..6c3bfef 100755
--- a/buildbot_test_toolchains.py
+++ b/buildbot_test_toolchains.py
@@ -39,7 +39,7 @@
ROLE_ACCOUNT = 'mobiletc-prebuild'
TOOLCHAIN_DIR = os.path.dirname(os.path.realpath(__file__))
TMP_TOOLCHAIN_TEST = '/tmp/toolchain-tests'
-MAIL_PROGRAM = '~/var/bin/mail-sheriff'
+MAIL_PROGRAM = '~/var/bin/mail-detective'
PENDING_ARCHIVES_DIR = os.path.join(CROSTC_ROOT, 'pending_archives')
NIGHTLY_TESTS_RESULTS = os.path.join(CROSTC_ROOT, 'nightly_test_reports')
@@ -228,25 +228,25 @@
noschedv2_opts = '--noschedv2' if self._noschedv2 else ''
command = ('{crosperf} --no_email={no_email} --results_dir={r_dir} '
'--logging_level=verbose --json_report=True {noschedv2_opts} '
- '{exp_file}').format(
- crosperf=crosperf,
- no_email=not self._test,
- r_dir=self._reports_dir,
- noschedv2_opts=noschedv2_opts,
- exp_file=experiment_file)
+ '{exp_file}').format(crosperf=crosperf,
+ no_email=not self._test,
+ r_dir=self._reports_dir,
+ noschedv2_opts=noschedv2_opts,
+ exp_file=experiment_file)
return self._ce.RunCommand(command)
def _SendEmail(self):
"""Find email message generated by crosperf and send it."""
filename = os.path.join(self._reports_dir, 'msg_body.html')
- if (os.path.exists(filename) and
- os.path.exists(os.path.expanduser(MAIL_PROGRAM))):
+ if (os.path.exists(filename)
+ and os.path.exists(os.path.expanduser(MAIL_PROGRAM))):
email_title = 'buildbot llvm test results'
if USE_LLVM_NEXT_PATCH in self._patches_string:
email_title = 'buildbot llvm_next test results'
- command = ('cat %s | %s -s "%s, %s %s" -team -html' %
- (filename, MAIL_PROGRAM, email_title, self._board, self._date))
+ command = (
+ 'cat %s | %s -s "%s, %s %s" -team -html' %
+ (filename, MAIL_PROGRAM, email_title, self._board, self._date))
self._ce.RunCommand(command)
def _CopyJson(self):
@@ -317,44 +317,41 @@
# Common initializations
command_executer.InitCommandExecuter()
parser = argparse.ArgumentParser()
- parser.add_argument(
- '--remote', dest='remote', help='Remote machines to run tests on.')
- parser.add_argument(
- '--board', dest='board', default='x86-zgb', help='The target board.')
- parser.add_argument(
- '--chromeos_root',
- dest='chromeos_root',
- help='The chromeos root from which to run tests.')
- parser.add_argument(
- '--weekday',
- default='',
- dest='weekday',
- help='The day of the week for which to run tests.')
- parser.add_argument(
- '--patch',
- dest='patches',
- help='The patches to use for the testing, '
- "seprate the patch numbers with ',' "
- 'for more than one patches.')
- parser.add_argument(
- '--noschedv2',
- dest='noschedv2',
- action='store_true',
- default=False,
- help='Pass --noschedv2 to crosperf.')
- parser.add_argument(
- '--recipe',
- dest='recipe',
- default=True,
- help='Use images generated from recipe rather than'
- 'launching tryjob to get images.')
- parser.add_argument(
- '--test',
- dest='test',
- default=False,
- help='Test this script on local desktop, '
- 'disabling mobiletc checking and email sending.'
- 'Artifacts stored in /tmp/toolchain-tests')
+ parser.add_argument('--remote',
+ dest='remote',
+ help='Remote machines to run tests on.')
+ parser.add_argument('--board',
+ dest='board',
+ default='x86-zgb',
+ help='The target board.')
+ parser.add_argument('--chromeos_root',
+ dest='chromeos_root',
+ help='The chromeos root from which to run tests.')
+ parser.add_argument('--weekday',
+ default='',
+ dest='weekday',
+ help='The day of the week for which to run tests.')
+ parser.add_argument('--patch',
+ dest='patches',
+ help='The patches to use for the testing, '
+                      "separate the patch numbers with ',' "
+                      'for more than one patch.')
+ parser.add_argument('--noschedv2',
+ dest='noschedv2',
+ action='store_true',
+ default=False,
+ help='Pass --noschedv2 to crosperf.')
+ parser.add_argument('--recipe',
+ dest='recipe',
+ default=True,
+                      help='Use images generated from recipe rather than '
+                      'launching a tryjob to get images.')
+ parser.add_argument('--test',
+ dest='test',
+ default=False,
+                      help='Test this script on a local desktop, '
+                      'disabling mobiletc checking and email sending. '
+ 'Artifacts stored in /tmp/toolchain-tests')
options = parser.parse_args(argv[1:])
if not options.board:
@@ -372,9 +369,10 @@
shutil.rmtree(TMP_TOOLCHAIN_TEST)
os.mkdir(TMP_TOOLCHAIN_TEST)
- fc = ToolchainComparator(options.board, options.remote, options.chromeos_root,
- options.weekday, options.patches, options.recipe,
- options.test, options.noschedv2)
+ fc = ToolchainComparator(options.board, options.remote,
+ options.chromeos_root, options.weekday,
+ options.patches, options.recipe, options.test,
+ options.noschedv2)
return fc.DoAll()
diff --git a/compiler-test.sh b/compiler-test.sh
index 8083e2d..fe552a5 100755
--- a/compiler-test.sh
+++ b/compiler-test.sh
@@ -10,9 +10,6 @@
# each compiler. It writes out these statistics when it is done.
#
# For a locally-built ChromeOS image, the debug directory is usually:
-# ${chromeos_root}/chroot/build/${board}/usr/lib/debug (from outside
-# chroot)
-# or
# /build/${board}/usr/lib/debug (from inside chroot)
#
# For a buildbot-built image you can usually download the debug tree
@@ -44,7 +41,7 @@
cd ${DEBUG_TREE}
for f in `find . -name "*.debug" -type f` ; do
- at_producer=`readelf --debug-dump=info $f | head -25 | grep AT_producer `;
+ at_producer=`llvm-dwarfdump $f | head -25 | grep AT_producer `;
if echo ${at_producer} | grep -q 'GNU C' ; then
((gcc_count++))
elif echo ${at_producer} | grep -q 'clang'; then
diff --git a/compiler_wrapper/README.md b/compiler_wrapper/README.md
index e55bb66..12ae313 100644
--- a/compiler_wrapper/README.md
+++ b/compiler_wrapper/README.md
@@ -44,18 +44,10 @@
```
Rename chromiumos-overlay/sys-devel/gcc/gcc-${VERSION}.ebuild to the next
-revision number. For example, if the current version is 4.9.2-r254:
+revision number. For example, if the current version is 10.2.0-r3:
```
(chroot) cd ~/trunk/src/third_party/chromiumos-overlay
-(chroot) git mv sys-devel/gcc/gcc-4.9.2-r254.ebuild sys-devel/gcc/gcc-4.9.2-r255.ebuild
-```
-
-Edit the new ebuild file and change the number at the end to match the new revision:
-```
--# If you need to force a cros_workon uprev, change this number (you can use next
--# uprev): 254
-+# If you need to force a cros_workon uprev, change this number (you can use next
-+# uprev): 255
+(chroot) git mv sys-devel/gcc/gcc-10.2.0-r3.ebuild sys-devel/gcc/gcc-10.2.0-r4.ebuild
```
Commit those changes together with the changes made by
@@ -74,10 +66,10 @@
Generated wrappers are stored here:
- Sysroot wrapper with ccache:
- `/usr/x86_64-pc-linux-gnu/<arch>/gcc-bin/4.9.x/sysroot_wrapper.hardened.ccache`
+ `/usr/x86_64-pc-linux-gnu/<arch>/gcc-bin/10.2.0/sysroot_wrapper.hardened.ccache`
- Sysroot wrapper without ccache:
- `/usr/x86_64-pc-linux-gnu/<arch>/gcc-bin/4.9.x/sysroot_wrapper.hardened.noccache`
+ `/usr/x86_64-pc-linux-gnu/<arch>/gcc-bin/10.2.0/sysroot_wrapper.hardened.noccache`
- Clang host wrapper:
`/usr/bin/clang_host_wrapper`
- Gcc host wrapper:
- `/usr/x86_64-pc-linux-gnu/gcc-bin/4.9.x/host_wrapper`
+ `/usr/x86_64-pc-linux-gnu/gcc-bin/10.2.0/host_wrapper`
diff --git a/compiler_wrapper/bundle.py b/compiler_wrapper/bundle.py
index 173625f..6df8214 100755
--- a/compiler_wrapper/bundle.py
+++ b/compiler_wrapper/bundle.py
@@ -18,14 +18,25 @@
def parse_args():
parser = argparse.ArgumentParser()
- parser.add_argument('output_dir')
+ default_output_dir = os.path.normpath(
+ os.path.join(
+ os.path.dirname(os.path.realpath(__file__)),
+ '../../chromiumos-overlay/sys-devel/llvm/files/compiler_wrapper'))
+ parser.add_argument(
+ '--output_dir',
+ default=default_output_dir,
+ help='Output directory to place bundled files (default: %(default)s)')
+ parser.add_argument(
+ '--create',
+ action='store_true',
+ help='Create output_dir if it does not already exist')
return parser.parse_args()
def copy_files(input_dir, output_dir):
for filename in os.listdir(input_dir):
if ((filename.endswith('.go') and not filename.endswith('_test.go')) or
- filename == 'build.py'):
+ filename in ('build.py', 'go.mod')):
shutil.copy(
os.path.join(input_dir, filename), os.path.join(output_dir, filename))
@@ -57,6 +68,10 @@
args = parse_args()
input_dir = os.path.dirname(__file__)
change_id = read_change_id(input_dir)
+ if not args.create:
+ assert os.path.exists(
+ args.output_dir
+ ), f'Specified output directory ({args.output_dir}) does not exist'
shutil.rmtree(args.output_dir, ignore_errors=True)
os.makedirs(args.output_dir)
copy_files(input_dir, args.output_dir)
diff --git a/compiler_wrapper/ccache_flag.go b/compiler_wrapper/ccache_flag.go
index 265b8fc..02fb43a 100644
--- a/compiler_wrapper/ccache_flag.go
+++ b/compiler_wrapper/ccache_flag.go
@@ -19,6 +19,13 @@
return arg.value
})
+ // Disable ccache during portage's src_configure phase. Using ccache here is generally a
+ // waste of time, since these files are very small. Experimentally, this speeds up
+ // configuring by ~13%.
+ if val, present := builder.env.getenv("EBUILD_PHASE"); present && val == "configure" {
+ useCCache = false
+ }
+
if builder.cfg.useCCache && useCCache {
// Note: we used to also set CCACHE_BASEDIR but don't do it
// anymore for reasons outlined in crrev.com/c/2103170.
diff --git a/compiler_wrapper/ccache_flag_test.go b/compiler_wrapper/ccache_flag_test.go
index 03a74de..d6eeb92 100644
--- a/compiler_wrapper/ccache_flag_test.go
+++ b/compiler_wrapper/ccache_flag_test.go
@@ -5,6 +5,7 @@
package main
import (
+ "path/filepath"
"testing"
)
@@ -160,3 +161,29 @@
work(ctx)
})
}
+
+func TestRusagePreventsCCache(t *testing.T) {
+ withCCacheEnabledTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
+ ctx.env = append(ctx.env, "TOOLCHAIN_RUSAGE_OUTPUT="+filepath.Join(ctx.tempDir, "rusage.log"))
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc)))
+ if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestCcacheIsDisabledInSrcConfigure(t *testing.T) {
+ withCCacheEnabledTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
+ ctx.env = append(ctx.env, "EBUILD_PHASE=configure")
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc)))
+ if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
+ t.Error(err)
+ }
+ })
+}
diff --git a/compiler_wrapper/clang_flags.go b/compiler_wrapper/clang_flags.go
index 9eb951d..e25ed74 100644
--- a/compiler_wrapper/clang_flags.go
+++ b/compiler_wrapper/clang_flags.go
@@ -7,6 +7,7 @@
import (
"bytes"
"os"
+ "path"
"path/filepath"
"strings"
)
@@ -46,9 +47,7 @@
//
// Use of -Qunused-arguments allows this set to be small, just those
// that clang still warns about.
- unsupported := map[string]bool{
- "-pass-exit-codes": true,
- }
+ unsupported := make(map[string]bool)
unsupportedPrefixes := []string{"-Wstrict-aliasing=", "-finline-limit="}
@@ -64,11 +63,8 @@
// Clang may use different options for the same or similar functionality.
gccToClang := map[string]string{
- "-Wno-error=cpp": "-Wno-#warnings",
- "-Wno-error=maybe-uninitialized": "-Wno-error=uninitialized",
- "-Wno-error=unused-but-set-variable": "-Wno-error=unused-variable",
- "-Wno-unused-but-set-variable": "-Wno-unused-variable",
- "-Wunused-but-set-variable": "-Wunused-variable",
+ "-Wno-error=cpp": "-Wno-#warnings",
+ "-Wno-error=maybe-uninitialized": "-Wno-error=uninitialized",
}
// Note: not using builder.transformArgs as we need to add multiple arguments
@@ -130,25 +126,16 @@
// Specify the target for clang.
if !builder.cfg.isHostWrapper {
- linkerPath := getLinkerPath(env, builder.target.target+"-ld", builder.rootPath)
+ linkerPath := getLinkerPath(env, builder.target.target+"-ld.bfd", builder.rootPath)
relLinkerPath, err := filepath.Rel(env.getwd(), linkerPath)
if err != nil {
return wrapErrorwithSourceLocf(err, "failed to make linker path %s relative to %s",
linkerPath, env.getwd())
}
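+ // Pass the linker directory plus "<target>-" via --prefix so clang can locate the target-prefixed binutils (e.g. <target>-ld.bfd); -B below covers the unprefixed search path.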
+ prefixPath := path.Join(relLinkerPath, builder.target.target+"-")
+ builder.addPreUserArgs("--prefix=" + prefixPath)
builder.addPostUserArgs("-B" + relLinkerPath)
- if startswithI86(builder.target.arch) {
- // TODO: -target i686-pc-linux-gnu causes clang to search for
- // libclang_rt.asan-i686.a which doesn't exist because it's packaged
- // as libclang_rt.asan-i386.a. We can't use -target i386-pc-linux-gnu
- // because then it would try to run i386-pc-linux-gnu-ld which doesn't
- // exist. Consider renaming the runtime library to use i686 in its name.
- builder.addPostUserArgs("-m32")
- // clang does not support -mno-movbe. This is the alternate way to do it.
- builder.addPostUserArgs("-Xclang", "-target-feature", "-Xclang", "-movbe")
- } else {
- builder.addPostUserArgs("-target", builder.target.target)
- }
+ builder.addPostUserArgs("-target", builder.target.target)
}
return nil
}
diff --git a/compiler_wrapper/clang_flags_test.go b/compiler_wrapper/clang_flags_test.go
index a4145c2..23aed7e 100644
--- a/compiler_wrapper/clang_flags_test.go
+++ b/compiler_wrapper/clang_flags_test.go
@@ -46,7 +46,7 @@
func TestAbsoluteClangPathBasedOnRootPath(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.clangRootRelPath = "somepath"
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
ctx.newCommand(filepath.Join(ctx.tempDir, clangX86_64), mainCc)))
if err := verifyPath(cmd, filepath.Join(ctx.tempDir, "somepath/usr/bin/clang")); err != nil {
@@ -57,7 +57,7 @@
func TestRelativeClangPathBasedOnRootPath(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.clangRootRelPath = "somepath"
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
ctx.newCommand(clangX86_64, mainCc)))
if err := verifyPath(cmd, "somepath/usr/bin/clang"); err != nil {
@@ -68,7 +68,7 @@
func TestRelativeClangPathWithDirBasedOnRootPath(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.clangRootRelPath = "somepath"
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
ctx.newCommand("test/x86_64-cros-linux-gnu-clang", mainCc)))
if err := verifyPath(cmd, "test/somepath/usr/bin/clang"); err != nil {
@@ -79,7 +79,7 @@
func TestPathEnvClangPathBasedOnRootPath(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.clangRootRelPath = "somepath"
ctx.env = []string{"PATH=" + filepath.Join(ctx.tempDir, "/pathenv")}
ctx.writeFile(filepath.Join(ctx.tempDir, "/pathenv/x86_64-cros-linux-gnu-clang"), "")
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
@@ -93,7 +93,7 @@
func TestClangPathForClangHostWrapper(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
ctx.cfg.isHostWrapper = true
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.clangRootRelPath = "somepath"
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
ctx.newCommand(clangX86_64, mainCc)))
if err := verifyPath(cmd, filepath.Join(ctx.tempDir, "clang")); err != nil {
@@ -128,7 +128,7 @@
func TestUseXclangPathAndCalcResourceDirByNestedClangCall(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.clangRootRelPath = "somepath"
ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
if ctx.cmdCount > 1 {
return nil
@@ -183,10 +183,7 @@
in string
out string
}{
- {"-Wno-error=unused-but-set-variable", "-Wno-error=unused-variable"},
{"-Wno-error=maybe-uninitialized", "-Wno-error=uninitialized"},
- {"-Wno-unused-but-set-variable", "-Wno-unused-variable"},
- {"-Wunused-but-set-variable", "-Wunused-variable"},
{"-Wno-error=cpp", "-Wno-#warnings"},
{"-Xclang-only=-abc=xyz", "-abc=xyz"},
}
@@ -211,7 +208,6 @@
flag string
expectedCount int
}{
- {clangX86_64, "-pass-exit-codes", 0},
{clangX86_64, "-Wstrict-aliasing=xyz", 0},
{clangX86_64, "-finline-limit=xyz", 0},
{"./armv7a-cros-linux-gnu-clang", "-ftrapv", 0},
@@ -236,7 +232,7 @@
compiler string
flags []string
}{
- {"./i686_64-cros-linux-gnu-clang", []string{mainCc, "-m32", "-Xclang", "-target-feature", "-Xclang", "-movbe"}},
+ {"./i686_64-cros-linux-gnu-clang", []string{mainCc, "-target", "i686_64-cros-linux-gnu"}},
{"./x86_64-cros-linux-gnu-clang", []string{mainCc, "-target", "x86_64-cros-linux-gnu"}},
}
for _, tt := range tests {
@@ -252,13 +248,17 @@
func TestClangLinkerPathProbesBinariesOnPath(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
linkerPath := filepath.Join(ctx.tempDir, "a/b/c")
- ctx.writeFile(filepath.Join(linkerPath, "x86_64-cros-linux-gnu-ld"), "")
+ ctx.writeFile(filepath.Join(linkerPath, "x86_64-cros-linux-gnu-ld.bfd"), "")
ctx.env = []string{"PATH=nonExistantPath:" + linkerPath}
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
ctx.newCommand("./x86_64-cros-linux-gnu-clang", mainCc)))
if err := verifyArgOrder(cmd, "-Ba/b/c"); err != nil {
t.Error(err)
}
+ if err := verifyArgOrder(cmd, "--prefix=a/b/c/x86_64-cros-linux-gnu-"); err != nil {
+ t.Error(err)
+ }
+
})
}
@@ -268,7 +268,7 @@
ctx.writeFile(realLinkerPath, "")
firstLinkLinkerPath := filepath.Join(ctx.tempDir, "a/first/somelinker")
ctx.symlink(realLinkerPath, firstLinkLinkerPath)
- secondLinkLinkerPath := filepath.Join(ctx.tempDir, "a/second/x86_64-cros-linux-gnu-ld")
+ secondLinkLinkerPath := filepath.Join(ctx.tempDir, "a/second/x86_64-cros-linux-gnu-ld.bfd")
ctx.symlink(firstLinkLinkerPath, secondLinkLinkerPath)
ctx.env = []string{"PATH=nonExistantPath:" + filepath.Dir(secondLinkLinkerPath)}
@@ -277,6 +277,10 @@
if err := verifyArgOrder(cmd, "-Ba/first"); err != nil {
t.Error(err)
}
+ if err := verifyArgOrder(cmd, "--prefix=a/first/x86_64-cros-linux-gnu-"); err != nil {
+ t.Error(err)
+ }
+
})
}
@@ -287,16 +291,22 @@
if err := verifyArgOrder(cmd, "-Bbin"); err != nil {
t.Error(err)
}
+ if err := verifyArgOrder(cmd, "--prefix=bin/x86_64-cros-linux-gnu-"); err != nil {
+ t.Error(err)
+ }
})
}
func TestClangLinkerPathRelativeToRootDir(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.clangRootRelPath = "somepath"
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
ctx.newCommand(clangX86_64, mainCc)))
if err := verifyArgOrder(cmd, "-Bsomepath/bin"); err != nil {
t.Error(err)
}
+ if err := verifyArgOrder(cmd, "--prefix=somepath/bin/x86_64-cros-linux-gnu-"); err != nil {
+ t.Error(err)
+ }
})
}
diff --git a/compiler_wrapper/command.go b/compiler_wrapper/command.go
index 6957859..eb040b2 100644
--- a/compiler_wrapper/command.go
+++ b/compiler_wrapper/command.go
@@ -5,12 +5,14 @@
package main
import (
+ "context"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
+ "time"
)
type command struct {
@@ -24,15 +26,8 @@
}
func newProcessCommand() *command {
- // This is a workaround for the fact that ld.so does not support
- // passing in the executable name when ld.so is invoked as
- // an executable (crbug/1003841).
- path := os.Getenv("LD_ARGV0")
- if path == "" {
- path = os.Args[0]
- }
return &command{
- Path: path,
+ Path: os.Args[0],
Args: os.Args[1:],
}
}
@@ -70,6 +65,26 @@
return execCmd.Run()
}
+func runCmdWithTimeout(env env, cmd *command, t time.Duration) error {
+ ctx, cancel := context.WithTimeout(context.Background(), t)
+ defer cancel()
+ cmdCtx := exec.CommandContext(ctx, cmd.Path, cmd.Args...)
+ cmdCtx.Env = mergeEnvValues(env.environ(), cmd.EnvUpdates)
+ cmdCtx.Dir = env.getwd()
+ cmdCtx.Stdin = env.stdin()
+ cmdCtx.Stdout = env.stdout()
+ cmdCtx.Stderr = env.stderr()
+
+ if err := cmdCtx.Start(); err != nil {
+ return newErrorwithSourceLocf("exec error: %v", err)
+ }
+ err := cmdCtx.Wait()
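+ // Prefer the context error: on timeout this surfaces context.DeadlineExceeded, which callers detect via errors.Is.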
+ if ctx.Err() == nil {
+ return err
+ }
+ return ctx.Err()
+}
+
func resolveAgainstPathEnv(env env, cmd string) (string, error) {
path, _ := env.getenv("PATH")
for _, path := range strings.Split(path, ":") {
@@ -141,7 +156,12 @@
if err != nil {
return nil, err
}
- rootPath := filepath.Join(filepath.Dir(absWrapperPath), cfg.rootRelPath)
+ var rootPath string
+ if compilerType == gccType {
+ rootPath = filepath.Join(filepath.Dir(absWrapperPath), cfg.gccRootRelPath)
+ } else {
+ rootPath = filepath.Join(filepath.Dir(absWrapperPath), cfg.clangRootRelPath)
+ }
return &commandBuilder{
path: cmd.Path,
args: createBuilderArgs( /*fromUser=*/ true, cmd.Args),
@@ -207,8 +227,10 @@
}
}
-func (builder *commandBuilder) wrapPath(path string) {
- builder.args = append([]builderArg{{value: builder.path, fromUser: false}}, builder.args...)
+func (builder *commandBuilder) wrapPath(path string, extraFlags ...string) {
+ newArgs := createBuilderArgs( /*fromUser=*/ false, extraFlags)
+ newArgs = append(newArgs, builderArg{value: builder.path, fromUser: false})
+ builder.args = append(newArgs, builder.args...)
builder.path = path
}
diff --git a/compiler_wrapper/compile_with_fallback_test.go b/compiler_wrapper/compile_with_fallback_test.go
index a67f3eb..f9da441 100644
--- a/compiler_wrapper/compile_with_fallback_test.go
+++ b/compiler_wrapper/compile_with_fallback_test.go
@@ -221,6 +221,8 @@
func TestCompileWithFallbackLogCommandAndErrors(t *testing.T) {
withCompileWithFallbackTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestReadsFromUmask()
+
ctx.env = append(ctx.env, "ANDROID_LLVM_FALLBACK_DISABLED_WARNINGS=-a -b")
ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
switch ctx.cmdCount {
diff --git a/compiler_wrapper/compiler_wrapper.go b/compiler_wrapper/compiler_wrapper.go
index 2b9459e..986eaba 100644
--- a/compiler_wrapper/compiler_wrapper.go
+++ b/compiler_wrapper/compiler_wrapper.go
@@ -6,10 +6,14 @@
import (
"bytes"
+ "context"
+ "errors"
"fmt"
"io"
"path/filepath"
+ "strconv"
"strings"
+ "time"
)
func callCompiler(env env, cfg *config, inputCmd *command) int {
@@ -61,6 +65,50 @@
return "." + string(filepath.Separator) + basePart
}
+func runAndroidClangTidy(env env, cmd *command) error {
+ timeout, found := env.getenv("TIDY_TIMEOUT")
+ if !found {
+ return env.exec(cmd)
+ }
+ seconds, err := strconv.Atoi(timeout)
+ if err != nil || seconds == 0 {
+ return env.exec(cmd)
+ }
+ getSourceFile := func() string {
+ // Note: This depends on Android build system's clang-tidy command line format.
+ // Last non-flag before "--" in cmd.Args is used as the source file name.
+ sourceFile := "unknown_file"
+ for _, arg := range cmd.Args {
+ if arg == "--" {
+ break
+ }
+ if strings.HasPrefix(arg, "-") {
+ continue
+ }
+ sourceFile = arg
+ }
+ return sourceFile
+ }
+ startTime := time.Now()
+ err = env.runWithTimeout(cmd, time.Duration(seconds)*time.Second)
+ if !errors.Is(err, context.DeadlineExceeded) {
+ // When the time used is over half of TIDY_TIMEOUT, give a warning.
+ // These warnings allow users to fix slow jobs before they get worse.
+ usedSeconds := int(time.Now().Sub(startTime) / time.Second)
+ if usedSeconds > seconds/2 {
+ warning := "%s:1:1: warning: clang-tidy used %d seconds.\n"
+ fmt.Fprintf(env.stdout(), warning, getSourceFile(), usedSeconds)
+ }
+ return err
+ }
+ // When DeadlineExceeded, print warning messages.
+ warning := "%s:1:1: warning: clang-tidy aborted after %d seconds.\n"
+ fmt.Fprintf(env.stdout(), warning, getSourceFile(), seconds)
+ fmt.Fprintf(env.stdout(), "TIMEOUT: %s %s\n", cmd.Path, strings.Join(cmd.Args, " "))
+ // Do not stop the Android build. Just give a warning and return no error.
+ return nil
+}
+
func callCompilerInternal(env env, cfg *config, inputCmd *command) (exitCode int, err error) {
if err := checkUnsupportedFlags(inputCmd); err != nil {
return 0, err
@@ -74,6 +122,15 @@
env = mainBuilder.env
var compilerCmd *command
clangSyntax := processClangSyntaxFlag(mainBuilder)
+
+ rusageEnabled := isRusageEnabled(env)
+
+ // Disable CCache for rusage logs.
+ // Note: Disabling Goma causes timeout-related INFRA_FAILUREs in builders.
+ allowCCache := !rusageEnabled
+ remoteBuildUsed := false
+
+ workAroundKernelBugWithRetries := false
if cfg.isAndroidWrapper {
mainBuilder.path = calculateAndroidWrapperPath(mainBuilder.path, mainBuilder.absWrapperPath)
switch mainBuilder.target.compilerType {
@@ -81,7 +138,9 @@
mainBuilder.addPreUserArgs(mainBuilder.cfg.clangFlags...)
mainBuilder.addPreUserArgs(mainBuilder.cfg.commonFlags...)
mainBuilder.addPostUserArgs(mainBuilder.cfg.clangPostFlags...)
- if _, err := processGomaCccFlags(mainBuilder); err != nil {
+ inheritGomaFromEnv := true
+ // Android doesn't support rewrapper; don't try to use it.
+ if remoteBuildUsed, err = processGomaCccFlags(mainBuilder, inheritGomaFromEnv); err != nil {
return 0, err
}
compilerCmd = mainBuilder.build()
@@ -97,19 +156,18 @@
if err != nil {
return 0, err
}
- allowCCache := true
if tidyMode != tidyModeNone {
allowCCache = false
- clangCmdWithoutGomaAndCCache := mainBuilder.build()
+ clangCmdWithoutRemoteBuildAndCCache := mainBuilder.build()
var err error
switch tidyMode {
case tidyModeTricium:
if cfg.triciumNitsDir == "" {
return 0, newErrorwithSourceLocf("tricium linting was requested, but no nits directory is configured")
}
- err = runClangTidyForTricium(env, clangCmdWithoutGomaAndCCache, cSrcFile, cfg.triciumNitsDir, tidyFlags, cfg.crashArtifactsDir)
+ err = runClangTidyForTricium(env, clangCmdWithoutRemoteBuildAndCCache, cSrcFile, cfg.triciumNitsDir, tidyFlags, cfg.crashArtifactsDir)
case tidyModeAll:
- err = runClangTidy(env, clangCmdWithoutGomaAndCCache, cSrcFile, tidyFlags)
+ err = runClangTidy(env, clangCmdWithoutRemoteBuildAndCCache, cSrcFile, tidyFlags)
default:
panic(fmt.Sprintf("Unknown tidy mode: %v", tidyMode))
}
@@ -118,64 +176,135 @@
return 0, err
}
}
- if err := processGomaCCacheFlags(allowCCache, mainBuilder); err != nil {
+ if remoteBuildUsed, err = processRemoteBuildAndCCacheFlags(allowCCache, mainBuilder); err != nil {
return 0, err
}
compilerCmd = mainBuilder.build()
} else {
if clangSyntax {
- allowCCache := false
- clangCmd, err := calcClangCommand(allowCCache, mainBuilder.clone())
+ allowCCache = false
+ _, clangCmd, err := calcClangCommand(allowCCache, mainBuilder.clone())
if err != nil {
return 0, err
}
- gccCmd, err := calcGccCommand(mainBuilder)
+ _, gccCmd, err := calcGccCommand(rusageEnabled, mainBuilder)
if err != nil {
return 0, err
}
return checkClangSyntax(env, clangCmd, gccCmd)
}
- compilerCmd, err = calcGccCommand(mainBuilder)
+ remoteBuildUsed, compilerCmd, err = calcGccCommand(rusageEnabled, mainBuilder)
if err != nil {
return 0, err
}
+ workAroundKernelBugWithRetries = true
}
}
- rusageLogfileName := getRusageLogFilename(env)
+
bisectStage := getBisectStage(env)
- if shouldForceDisableWerror(env, cfg) {
- if rusageLogfileName != "" {
- return 0, newUserErrorf("GETRUSAGE is meaningless with FORCE_DISABLE_WERROR")
- }
+
+ if rusageEnabled {
+ compilerCmd = removeRusageFromCommand(compilerCmd)
+ }
+
+ if shouldForceDisableWerror(env, cfg, mainBuilder.target.compilerType) {
if bisectStage != "" {
return 0, newUserErrorf("BISECT_STAGE is meaningless with FORCE_DISABLE_WERROR")
}
return doubleBuildWithWNoError(env, cfg, compilerCmd)
}
if shouldCompileWithFallback(env) {
- if rusageLogfileName != "" {
- return 0, newUserErrorf("GETRUSAGE is meaningless with FORCE_DISABLE_WERROR")
+ if rusageEnabled {
+ return 0, newUserErrorf("TOOLCHAIN_RUSAGE_OUTPUT is meaningless with ANDROID_LLVM_PREBUILT_COMPILER_PATH")
}
if bisectStage != "" {
- return 0, newUserErrorf("BISECT_STAGE is meaningless with FORCE_DISABLE_WERROR")
+ return 0, newUserErrorf("BISECT_STAGE is meaningless with ANDROID_LLVM_PREBUILT_COMPILER_PATH")
}
return compileWithFallback(env, cfg, compilerCmd, mainBuilder.absWrapperPath)
}
- if rusageLogfileName != "" {
- if bisectStage != "" {
- return 0, newUserErrorf("BISECT_STAGE is meaningless with GETRUSAGE")
- }
- return logRusage(env, rusageLogfileName, compilerCmd)
- }
if bisectStage != "" {
+ if rusageEnabled {
+ return 0, newUserErrorf("TOOLCHAIN_RUSAGE_OUTPUT is meaningless with BISECT_STAGE")
+ }
compilerCmd, err = calcBisectCommand(env, cfg, bisectStage, compilerCmd)
if err != nil {
return 0, err
}
}
- // Note: We return an exit code only if the underlying env is not
- // really doing an exec, e.g. commandRecordingEnv.
- return wrapSubprocessErrorWithSourceLoc(compilerCmd, env.exec(compilerCmd))
+
+ errRetryCompilation := errors.New("compilation retry requested")
+ var runCompiler func(willLogRusage bool) (int, error)
+ if !workAroundKernelBugWithRetries {
+ runCompiler = func(willLogRusage bool) (int, error) {
+ var err error
+ if willLogRusage {
+ err = env.run(compilerCmd, env.stdin(), env.stdout(), env.stderr())
+ } else if cfg.isAndroidWrapper && mainBuilder.target.compilerType == clangTidyType {
+ // Only clang-tidy has timeout feature now.
+ err = runAndroidClangTidy(env, compilerCmd)
+ } else {
+ // Note: We return from this in non-fatal circumstances only if the
+ // underlying env is not really doing an exec, e.g. commandRecordingEnv.
+ err = env.exec(compilerCmd)
+ }
+ return wrapSubprocessErrorWithSourceLoc(compilerCmd, err)
+ }
+ } else {
+ getStdin, err := prebufferStdinIfNeeded(env, compilerCmd)
+ if err != nil {
+ return 0, wrapErrorwithSourceLocf(err, "prebuffering stdin: %v", err)
+ }
+
+ stdoutBuffer := &bytes.Buffer{}
+ stderrBuffer := &bytes.Buffer{}
+ retryAttempt := 0
+ runCompiler = func(willLogRusage bool) (int, error) {
+ retryAttempt++
+ stdoutBuffer.Reset()
+ stderrBuffer.Reset()
+
+ exitCode, compilerErr := wrapSubprocessErrorWithSourceLoc(compilerCmd,
+ env.run(compilerCmd, getStdin(), stdoutBuffer, stderrBuffer))
+
+ if compilerErr != nil || exitCode != 0 {
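+ // Retry only while under kernelBugRetryLimit and the failure looks like the kernel bug (crbug.com/1166017) handled in kernel_bug.go.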
+ if retryAttempt < kernelBugRetryLimit && (errorContainsTracesOfKernelBug(compilerErr) || containsTracesOfKernelBug(stdoutBuffer.Bytes()) || containsTracesOfKernelBug(stderrBuffer.Bytes())) {
+ return exitCode, errRetryCompilation
+ }
+ }
+ _, stdoutErr := stdoutBuffer.WriteTo(env.stdout())
+ _, stderrErr := stderrBuffer.WriteTo(env.stderr())
+ if stdoutErr != nil {
+ return exitCode, wrapErrorwithSourceLocf(err, "writing stdout: %v", stdoutErr)
+ }
+ if stderrErr != nil {
+ return exitCode, wrapErrorwithSourceLocf(err, "writing stderr: %v", stderrErr)
+ }
+ return exitCode, compilerErr
+ }
+ }
+
+ for {
+ var exitCode int
+ commitRusage, err := maybeCaptureRusage(env, compilerCmd, func(willLogRusage bool) error {
+ var err error
+ exitCode, err = runCompiler(willLogRusage)
+ return err
+ })
+
+ switch {
+ case err == errRetryCompilation:
+ // Loop around again.
+ case err != nil:
+ return exitCode, err
+ default:
+ if !remoteBuildUsed {
+ if err := commitRusage(exitCode); err != nil {
+ return exitCode, fmt.Errorf("commiting rusage: %v", err)
+ }
+ }
+ return exitCode, err
+ }
+ }
}
func prepareClangCommand(builder *commandBuilder) (err error) {
@@ -191,38 +320,40 @@
return processClangFlags(builder)
}
-func calcClangCommand(allowCCache bool, builder *commandBuilder) (*command, error) {
+func calcClangCommand(allowCCache bool, builder *commandBuilder) (bool, *command, error) {
err := prepareClangCommand(builder)
if err != nil {
- return nil, err
+ return false, nil, err
}
- if err := processGomaCCacheFlags(allowCCache, builder); err != nil {
- return nil, err
+ remoteBuildUsed, err := processRemoteBuildAndCCacheFlags(allowCCache, builder)
+ if err != nil {
+ return remoteBuildUsed, nil, err
}
- return builder.build(), nil
+ return remoteBuildUsed, builder.build(), nil
}
-func calcGccCommand(builder *commandBuilder) (*command, error) {
+func calcGccCommand(enableRusage bool, builder *commandBuilder) (bool, *command, error) {
if !builder.cfg.isHostWrapper {
processSysrootFlag(builder)
}
builder.addPreUserArgs(builder.cfg.gccFlags...)
- if !builder.cfg.isHostWrapper {
- calcCommonPreUserArgs(builder)
- }
+ calcCommonPreUserArgs(builder)
processGccFlags(builder)
+
+ remoteBuildUsed := false
if !builder.cfg.isHostWrapper {
- allowCCache := true
- if err := processGomaCCacheFlags(allowCCache, builder); err != nil {
- return nil, err
+ var err error
+ if remoteBuildUsed, err = processRemoteBuildAndCCacheFlags(!enableRusage, builder); err != nil {
+ return remoteBuildUsed, nil, err
}
}
- return builder.build(), nil
+ return remoteBuildUsed, builder.build(), nil
}
func calcCommonPreUserArgs(builder *commandBuilder) {
builder.addPreUserArgs(builder.cfg.commonFlags...)
if !builder.cfg.isHostWrapper {
+ processLibGCCFlags(builder)
processPieFlags(builder)
processThumbCodeFlags(builder)
processStackProtectorFlags(builder)
@@ -231,18 +362,18 @@
processSanitizerFlags(builder)
}
-func processGomaCCacheFlags(allowCCache bool, builder *commandBuilder) (err error) {
- gomaccUsed := false
+func processRemoteBuildAndCCacheFlags(allowCCache bool, builder *commandBuilder) (remoteBuildUsed bool, err error) {
+ remoteBuildUsed = false
if !builder.cfg.isHostWrapper {
- gomaccUsed, err = processGomaCccFlags(builder)
+ remoteBuildUsed, err = processRemoteBuildFlags(builder)
if err != nil {
- return err
+ return remoteBuildUsed, err
}
}
- if !gomaccUsed && allowCCache {
+ if !remoteBuildUsed && allowCCache {
processCCacheFlag(builder)
}
- return nil
+ return remoteBuildUsed, nil
}
func getAbsWrapperPath(env env, wrapperCmd *command) (string, error) {
diff --git a/compiler_wrapper/compiler_wrapper_test.go b/compiler_wrapper/compiler_wrapper_test.go
index 52b92f5..74fe3f5 100644
--- a/compiler_wrapper/compiler_wrapper_test.go
+++ b/compiler_wrapper/compiler_wrapper_test.go
@@ -9,6 +9,9 @@
"errors"
"fmt"
"io"
+ "os"
+ "path"
+ "path/filepath"
"strings"
"syscall"
"testing"
@@ -94,27 +97,76 @@
})
}
-func TestErrorOnLogRusageAndForceDisableWError(t *testing.T) {
+func TestGomaDisablesRusage(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
+ gomaPath := path.Join(ctx.tempDir, "gomacc")
+ ctx.writeFile(gomaPath, "")
+ ctx.env = []string{"GOMACC_PATH=" + gomaPath}
+ logFileName := filepath.Join(ctx.tempDir, "rusage.log")
+ ctx.env = []string{
+ "TOOLCHAIN_RUSAGE_OUTPUT=" + logFileName,
+ "GOMACC_PATH=" + gomaPath,
+ }
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
+ // Ensure Goma was used
+ if err := verifyPath(cmd, gomaPath); err != nil {
+ t.Fatal(err)
+ }
+ if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
+ t.Error(err)
+ }
+ // Ensure rusage log was not created
+ if _, err := os.Stat(logFileName); err == nil {
+ t.Errorf("Logfile shouldn't have been created at TOOLCHAIN_RUSAGE_OUTPUT path %q but was", logFileName)
+ } else if !os.IsNotExist(err) {
+ t.Fatalf("error checking for rusage logfile at %q: %v", logFileName, err)
+ }
+ })
+}
+
+func TestLogRusageAndForceDisableWError(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
+ logFileName := filepath.Join(ctx.tempDir, "rusage.log")
ctx.env = []string{
"FORCE_DISABLE_WERROR=1",
- "GETRUSAGE=rusage.log",
+ "TOOLCHAIN_RUSAGE_OUTPUT=" + logFileName,
}
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNonInternalError(stderr, "GETRUSAGE is meaningless with FORCE_DISABLE_WERROR"); err != nil {
- t.Error(err)
+ ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
+ switch ctx.cmdCount {
+ case 1:
+ io.WriteString(stderr, "-Werror originalerror")
+ return newExitCodeError(1)
+ case 2:
+ return nil
+ default:
+ t.Fatalf("unexpected command: %#v", cmd)
+ return nil
+ }
+ }
+ ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
+ if _, err := os.Stat(logFileName); os.IsNotExist(err) {
+ t.Errorf("no logfile created at TOOLCHAIN_RUSAGE_OUTPUT path %q", logFileName)
+ } else if err != nil {
+ t.Fatalf("error checking for rusage logfile at %q: %v", logFileName, err)
+ }
+ if ctx.cmdCount != 2 {
+ t.Errorf("expected 2 calls. Got: %d", ctx.cmdCount)
}
})
}
func TestErrorOnLogRusageAndBisect(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
ctx.env = []string{
"BISECT_STAGE=xyz",
- "GETRUSAGE=rusage.log",
+ "TOOLCHAIN_RUSAGE_OUTPUT=rusage.log",
}
stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyNonInternalError(stderr, "BISECT_STAGE is meaningless with GETRUSAGE"); err != nil {
+ if err := verifyNonInternalError(stderr, "TOOLCHAIN_RUSAGE_OUTPUT is meaningless with BISECT_STAGE"); err != nil {
t.Error(err)
}
})
@@ -122,11 +174,13 @@
func TestErrorOnBisectAndForceDisableWError(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
ctx.env = []string{
"BISECT_STAGE=xyz",
"FORCE_DISABLE_WERROR=1",
}
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
+ stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(clangX86_64, mainCc)))
if err := verifyNonInternalError(stderr, "BISECT_STAGE is meaningless with FORCE_DISABLE_WERROR"); err != nil {
t.Error(err)
}
diff --git a/compiler_wrapper/config.go b/compiler_wrapper/config.go
index 8b5432b..6c28287 100644
--- a/compiler_wrapper/config.go
+++ b/compiler_wrapper/config.go
@@ -26,7 +26,8 @@
// by the user).
clangPostFlags []string
// Toolchain root path relative to the wrapper binary.
- rootRelPath string
+ clangRootRelPath string
+ gccRootRelPath string
// Directory to store errors that were prevented with -Wno-error.
newWarningsDir string
// Directory to store nits in when using `WITH_TIDY=tricium`.
@@ -105,8 +106,12 @@
// Full hardening.
// Temporarily disable function splitting because of chromium:434751.
var crosHardenedConfig = &config{
- rootRelPath: "../../../../..",
+ clangRootRelPath: "../..",
+ gccRootRelPath: "../../../../..",
+ // Pass "-fcommon" till the packages are fixed to work with new clang/gcc
+ // default of "-fno-common", crbug.com/1060413.
commonFlags: []string{
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
@@ -122,29 +127,34 @@
// Temporarily add no-unknown-warning-option to deal with old clang versions.
// Temporarily disable Wsection since kernel gets a bunch of these. chromium:778867
// Disable "-faddrsig" since it produces object files that strip doesn't understand, chromium:915742.
- // Pass "-fcommon" till the packages are fixed to work with new clang default
- // "-fno-common", crbug.com/1060413.
// crbug.com/1103065: -grecord-gcc-switches pollutes the Goma cache;
// removed that flag for now.
+ // Temporarily disable Wdeprecated-declarations. b/193860318
+
clangFlags: []string{
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
},
+
+ // Temporarily disable Wdeprecated-copy. b/191479033
clangPostFlags: []string{
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
},
newWarningsDir: "/tmp/fatal_clang_warnings",
triciumNitsDir: "/tmp/linting_output/clang-tidy",
@@ -153,8 +163,9 @@
// Flags to be added to non-hardened toolchain.
var crosNonHardenedConfig = &config{
- rootRelPath: "../../../../..",
- commonFlags: []string{},
+ clangRootRelPath: "../..",
+ gccRootRelPath: "../../../../..",
+ commonFlags: []string{},
gccFlags: []string{
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
@@ -164,22 +175,28 @@
// Temporarily disable tautological-*-compare chromium:778316.
// Temporarily add no-unknown-warning-option to deal with old clang versions.
// Temporarily disable Wsection since kernel gets a bunch of these. chromium:778867
+ // Temporarily disable Wdeprecated-declarations. b/193860318
clangFlags: []string{
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
},
+
+ // Temporarily disable Wdeprecated-copy. b/191479033
clangPostFlags: []string{
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
},
newWarningsDir: "/tmp/fatal_clang_warnings",
triciumNitsDir: "/tmp/linting_output/clang-tidy",
@@ -188,9 +205,14 @@
// Flags to be added to host toolchain.
var crosHostConfig = &config{
- isHostWrapper: true,
- rootRelPath: "../..",
- commonFlags: []string{},
+ isHostWrapper: true,
+ clangRootRelPath: "../..",
+ gccRootRelPath: "../..",
+ // Pass "-fcommon" till the packages are fixed to work with new clang/gcc
+ // default of "-fno-common", crbug.com/1060413.
+ commonFlags: []string{
+ "-fcommon",
+ },
gccFlags: []string{
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
@@ -198,17 +220,15 @@
},
// Temporarily disable tautological-*-compare chromium:778316.
// Temporarily add no-unknown-warning-option to deal with old clang versions.
- // Pass "-fcommon" till the packages are fixed to work with new clang default
- // "-fno-common", crbug.com/1060413.
// crbug.com/1103065: -grecord-gcc-switches pollutes the Goma cache;
// removed that flag for now.
+ // Temporarily disable Wdeprecated-declarations. b/193860318
clangFlags: []string{
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -216,11 +236,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
},
+
+ // Temporarily disable Wdeprecated-copy. b/191479033
clangPostFlags: []string{
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
},
newWarningsDir: "/tmp/fatal_clang_warnings",
triciumNitsDir: "/tmp/linting_output/clang-tidy",
@@ -230,7 +255,8 @@
var androidConfig = &config{
isHostWrapper: false,
isAndroidWrapper: true,
- rootRelPath: "./",
+ gccRootRelPath: "./",
+ clangRootRelPath: "./",
commonFlags: []string{},
gccFlags: []string{},
clangFlags: []string{},
diff --git a/compiler_wrapper/config_test.go b/compiler_wrapper/config_test.go
index 5492a1b..86a7892 100644
--- a/compiler_wrapper/config_test.go
+++ b/compiler_wrapper/config_test.go
@@ -38,7 +38,6 @@
}
}
-/* TODO: Re-enable this, when llvm-next is different than llvm
func TestRealConfigWithUseLLvmFlag(t *testing.T) {
resetGlobals()
defer resetGlobals()
@@ -50,7 +49,7 @@
if err != nil {
t.Fatal(err)
}
- if isUsingLLvmNext(cfg) {
+ if cfg.useLlvmNext {
t.Fatal("UseLLvmNext: Expected not to be used")
}
@@ -60,7 +59,7 @@
t.Fatal(err)
}
- if !isUsingLLvmNext(cfg) {
+ if !cfg.useLlvmNext {
t.Fatal("UseLLvmNext: Expected to be used")
}
@@ -69,7 +68,6 @@
t.Fatalf("UseLlvmNext: Expected an error, got none")
}
}
-*/
func TestRealConfigWithConfigNameFlag(t *testing.T) {
resetGlobals()
@@ -128,16 +126,6 @@
return false
}
-// TODO: Update this with correct flag when we change llvm-next.
-func isUsingLLvmNext(cfg *config) bool {
- for _, arg := range cfg.clangFlags {
- if arg == "-Wno-reorder-init-list" {
- return true
- }
- }
- return false
-}
-
func resetGlobals() {
// Set all global variables to a defined state.
UseLlvmNext = "unknown"
diff --git a/compiler_wrapper/cros_hardened_config_test.go b/compiler_wrapper/cros_hardened_config_test.go
index b459f1e..337b27f 100644
--- a/compiler_wrapper/cros_hardened_config_test.go
+++ b/compiler_wrapper/cros_hardened_config_test.go
@@ -16,8 +16,15 @@
const crosHardenedNoCCacheGoldenDir = "testdata/cros_hardened_noccache_golden"
const crosHardenedLlvmNextGoldenDir = "testdata/cros_hardened_llvmnext_golden"
-func TestCrosHardenedConfig(t *testing.T) {
+func withGoldenTestContext(t *testing.T, f func(ctx *testContext)) {
withTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+ f(ctx)
+ })
+}
+
+func TestCrosHardenedConfig(t *testing.T) {
+ withGoldenTestContext(t, func(ctx *testContext) {
useLlvmNext := false
useCCache := true
cfg, err := getConfig("cros.hardened", useCCache, useLlvmNext, "123")
@@ -31,7 +38,7 @@
}
func TestCrosHardenedConfigWithoutCCache(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
+ withGoldenTestContext(t, func(ctx *testContext) {
useLlvmNext := false
useCCache := false
cfg, err := getConfig("cros.hardened", useCCache, useLlvmNext, "123")
@@ -56,7 +63,7 @@
}
func TestCrosHardenedConfigWithLlvmNext(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
+ withGoldenTestContext(t, func(ctx *testContext) {
useLlvmNext := true
useCCache := true
cfg, err := getConfig("cros.hardened", useCCache, useLlvmNext, "123")
@@ -517,7 +524,7 @@
Name: "clang_specific_args.json",
Records: []goldenRecord{
{
- WrapperCmd: newGoldenCmd(clangX86_64, "-mno-movbe", "-pass-exit-codes", "-Wclobbered", "-Wno-psabi", "-Wlogical-op",
+ WrapperCmd: newGoldenCmd(clangX86_64, "-mno-movbe", "-Wclobbered", "-Wno-psabi", "-Wlogical-op",
"-Wmissing-parameter-type", "-Wold-style-declaration", "-Woverride-init", "-Wunsafe-loop-optimizations",
"-Wstrict-aliasing=abc", "-finline-limit=abc", mainCc),
Cmds: okResults,
@@ -531,18 +538,6 @@
Cmds: okResults,
},
{
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wno-error=unused-but-set-variable", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wno-unused-but-set-variable", mainCc),
- Cmds: okResults,
- },
- {
- WrapperCmd: newGoldenCmd(clangX86_64, "-Wunused-but-set-variable", mainCc),
- Cmds: okResults,
- },
- {
WrapperCmd: newGoldenCmd(clangX86_64, "-Xclang-only=-someflag", mainCc),
Cmds: okResults,
},
diff --git a/compiler_wrapper/cros_host_config_test.go b/compiler_wrapper/cros_host_config_test.go
index fee78e6..4f3b5cb 100644
--- a/compiler_wrapper/cros_host_config_test.go
+++ b/compiler_wrapper/cros_host_config_test.go
@@ -13,7 +13,7 @@
const crosGccHostGoldenDir = "testdata/cros_gcc_host_golden"
func TestCrosClangHostConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
+ withGoldenTestContext(t, func(ctx *testContext) {
useLlvmNext := false
useCCache := false
cfg, err := getConfig("cros.host", useCCache, useLlvmNext, "123")
@@ -43,7 +43,7 @@
}
func TestCrosGccHostConfig(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
+ withGoldenTestContext(t, func(ctx *testContext) {
useLlvmNext := false
useCCache := false
cfg, err := getConfig("cros.host", useCCache, useLlvmNext, "123")
diff --git a/compiler_wrapper/cros_llvm_next_flags.go b/compiler_wrapper/cros_llvm_next_flags.go
index 6cd7cd2..870e288 100644
--- a/compiler_wrapper/cros_llvm_next_flags.go
+++ b/compiler_wrapper/cros_llvm_next_flags.go
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+//go:build !android_llvm_next_flags
// +build !android_llvm_next_flags
package main
diff --git a/compiler_wrapper/cros_nonhardened_config_test.go b/compiler_wrapper/cros_nonhardened_config_test.go
index 4883c5f..3d413fb 100644
--- a/compiler_wrapper/cros_nonhardened_config_test.go
+++ b/compiler_wrapper/cros_nonhardened_config_test.go
@@ -12,6 +12,8 @@
func TestCrosNonHardenedConfig(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
useLlvmNext := false
useCCache := true
cfg, err := getConfig("cros.nonhardened", useCCache, useLlvmNext, "123")
diff --git a/compiler_wrapper/disable_werror_flag.go b/compiler_wrapper/disable_werror_flag.go
index 8f20b6f..cb770b7 100644
--- a/compiler_wrapper/disable_werror_flag.go
+++ b/compiler_wrapper/disable_werror_flag.go
@@ -14,15 +14,19 @@
"path"
"strconv"
"strings"
- "syscall"
)
const numWErrorEstimate = 30
-func shouldForceDisableWerror(env env, cfg *config) bool {
+func shouldForceDisableWerror(env env, cfg *config, ty compilerType) bool {
if cfg.isAndroidWrapper {
return cfg.useLlvmNext
}
+
+ // We only want this functionality for clang.
+ if ty != clangType {
+ return false
+ }
value, _ := env.getenv("FORCE_DISABLE_WERROR")
return value != ""
}
@@ -70,8 +74,12 @@
return 0, wrapErrorwithSourceLocf(err, "prebuffering stdin: %v", err)
}
- originalExitCode, err := wrapSubprocessErrorWithSourceLoc(originalCmd,
- env.run(originalCmd, getStdin(), originalStdoutBuffer, originalStderrBuffer))
+ var originalExitCode int
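+ // Capture rusage for this first invocation, but commit it only if its result is what gets reported below.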
+ commitOriginalRusage, err := maybeCaptureRusage(env, originalCmd, func(willLogRusage bool) error {
+ originalExitCode, err = wrapSubprocessErrorWithSourceLoc(originalCmd,
+ env.run(originalCmd, getStdin(), originalStdoutBuffer, originalStderrBuffer))
+ return err
+ })
if err != nil {
return 0, err
}
@@ -85,6 +93,9 @@
bytes.Contains(originalStdoutBufferBytes, []byte("warnings-as-errors")) ||
bytes.Contains(originalStdoutBufferBytes, []byte("clang-diagnostic-")))
if !shouldRetry {
+ if err := commitOriginalRusage(originalExitCode); err != nil {
+ return 0, fmt.Errorf("commiting rusage: %v", err)
+ }
originalStdoutBuffer.WriteTo(env.stdout())
originalStderrBuffer.WriteTo(env.stderr())
return originalExitCode, nil
@@ -97,19 +108,32 @@
Args: disableWerrorFlags(originalCmd.Args),
EnvUpdates: originalCmd.EnvUpdates,
}
- retryExitCode, err := wrapSubprocessErrorWithSourceLoc(retryCommand,
- env.run(retryCommand, getStdin(), retryStdoutBuffer, retryStderrBuffer))
+
+ var retryExitCode int
+ commitRetryRusage, err := maybeCaptureRusage(env, retryCommand, func(willLogRusage bool) error {
+ retryExitCode, err = wrapSubprocessErrorWithSourceLoc(retryCommand,
+ env.run(retryCommand, getStdin(), retryStdoutBuffer, retryStderrBuffer))
+ return err
+ })
if err != nil {
return 0, err
}
+
// If -Wno-error fixed us, pretend that we never ran without -Wno-error. Otherwise, pretend
// that we never ran the second invocation.
if retryExitCode != 0 {
originalStdoutBuffer.WriteTo(env.stdout())
originalStderrBuffer.WriteTo(env.stderr())
+ if err := commitOriginalRusage(originalExitCode); err != nil {
+ return 0, fmt.Errorf("commiting rusage: %v", err)
+ }
return originalExitCode, nil
}
+ if err := commitRetryRusage(retryExitCode); err != nil {
+ return 0, fmt.Errorf("commiting rusage: %v", err)
+ }
+
retryStdoutBuffer.WriteTo(env.stdout())
retryStderrBuffer.WriteTo(env.stderr())
@@ -151,8 +175,8 @@
// Buildbots use a nonzero umask, which isn't quite what we want: these directories should
// be world-readable and world-writable.
- oldMask := syscall.Umask(0)
- defer syscall.Umask(oldMask)
+ oldMask := env.umask(0)
+ defer env.umask(oldMask)
// Allow root and regular users to write to this without issue.
if err := os.MkdirAll(cfg.newWarningsDir, 0777); err != nil {
diff --git a/compiler_wrapper/disable_werror_flag_test.go b/compiler_wrapper/disable_werror_flag_test.go
index 0c43dbe..592c35b 100644
--- a/compiler_wrapper/disable_werror_flag_test.go
+++ b/compiler_wrapper/disable_werror_flag_test.go
@@ -318,6 +318,8 @@
func withForceDisableWErrorTestContext(t *testing.T, work func(ctx *testContext)) {
withTestContext(t, func(ctx *testContext) {
+ ctx.NoteTestWritesToUmask()
+
ctx.env = []string{"FORCE_DISABLE_WERROR=1"}
work(ctx)
})
@@ -412,13 +414,21 @@
// Disable werror ON
ctx.cfg.useLlvmNext = true
- if !shouldForceDisableWerror(ctx, ctx.cfg) {
+ if !shouldForceDisableWerror(ctx, ctx.cfg, gccType) {
+ t.Errorf("disable Werror not enabled for Android with useLlvmNext")
+ }
+
+ if !shouldForceDisableWerror(ctx, ctx.cfg, clangType) {
t.Errorf("disable Werror not enabled for Android with useLlvmNext")
}
// Disable werror OFF
ctx.cfg.useLlvmNext = false
- if shouldForceDisableWerror(ctx, ctx.cfg) {
+ if shouldForceDisableWerror(ctx, ctx.cfg, gccType) {
+ t.Errorf("disable-Werror enabled for Android without useLlvmNext")
+ }
+
+ if shouldForceDisableWerror(ctx, ctx.cfg, clangType) {
t.Errorf("disable-Werror enabled for Android without useLlvmNext")
}
})
@@ -426,9 +436,25 @@
func TestChromeOSNoForceDisableWerror(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- if shouldForceDisableWerror(ctx, ctx.cfg) {
+ if shouldForceDisableWerror(ctx, ctx.cfg, gccType) {
t.Errorf("disable Werror enabled for ChromeOS without FORCE_DISABLE_WERROR set")
}
+
+ if shouldForceDisableWerror(ctx, ctx.cfg, clangType) {
+ t.Errorf("disable Werror enabled for ChromeOS without FORCE_DISABLE_WERROR set")
+ }
+ })
+}
+
+func TestChromeOSForceDisableWerrorOnlyAppliesToClang(t *testing.T) {
+ withForceDisableWErrorTestContext(t, func(ctx *testContext) {
+ if !shouldForceDisableWerror(ctx, ctx.cfg, clangType) {
+ t.Errorf("Disable -Werror should be enabled for clang.")
+ }
+
+ if shouldForceDisableWerror(ctx, ctx.cfg, gccType) {
+ t.Errorf("Disable -Werror should be disabled for gcc.")
+ }
})
}
diff --git a/compiler_wrapper/env.go b/compiler_wrapper/env.go
index 2c48ad3..c8f6ceb 100644
--- a/compiler_wrapper/env.go
+++ b/compiler_wrapper/env.go
@@ -10,9 +10,12 @@
"io"
"os"
"strings"
+ "syscall"
+ "time"
)
type env interface {
+ umask(int) int
getenv(key string) (string, bool)
environ() []string
getwd() string
@@ -20,6 +23,7 @@
stdout() io.Writer
stderr() io.Writer
run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error
+ runWithTimeout(cmd *command, duration time.Duration) error
exec(cmd *command) error
}
@@ -52,6 +56,10 @@
var _ env = (*processEnv)(nil)
+func (env *processEnv) umask(newmask int) (oldmask int) {
+ return syscall.Umask(newmask)
+}
+
func (env *processEnv) getenv(key string) (string, bool) {
return os.LookupEnv(key)
}
@@ -80,6 +88,10 @@
return execCmd(env, cmd)
}
+func (env *processEnv) runWithTimeout(cmd *command, duration time.Duration) error {
+ return runCmdWithTimeout(env, cmd, duration)
+}
+
func (env *processEnv) run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
return runCmd(env, cmd, stdin, stdout, stderr)
}
@@ -108,6 +120,10 @@
return env.run(cmd, env.stdin(), env.stdout(), env.stderr())
}
+func (env *commandRecordingEnv) runWithTimeout(cmd *command, duration time.Duration) error {
+ return env.env.runWithTimeout(cmd, duration)
+}
+
func (env *commandRecordingEnv) run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
stdoutBuffer := &bytes.Buffer{}
stderrBuffer := &bytes.Buffer{}
@@ -134,6 +150,11 @@
return env.env.exec(cmd)
}
+func (env *printingEnv) runWithTimeout(cmd *command, duration time.Duration) error {
+ printCmd(env, cmd)
+ return env.env.runWithTimeout(cmd, duration)
+}
+
func (env *printingEnv) run(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
printCmd(env, cmd)
return env.env.run(cmd, stdin, stdout, stderr)
diff --git a/compiler_wrapper/env_test.go b/compiler_wrapper/env_test.go
index e03d60a..b5bf65a 100644
--- a/compiler_wrapper/env_test.go
+++ b/compiler_wrapper/env_test.go
@@ -6,13 +6,17 @@
import (
"bytes"
+ "context"
+ "errors"
"flag"
"io/ioutil"
"os"
"os/exec"
+ "path"
"path/filepath"
"strings"
"testing"
+ "time"
)
// Attention: The tests in this file execute the test binary again with the `-run` flag.
@@ -164,6 +168,48 @@
})
}
+func TestRunWithTimeoutRunsTheGivenProcess(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ env, err := newProcessEnv()
+ if err != nil {
+ t.Fatalf("Unexpected error making new process env: %v", err)
+ }
+
+ tempFile := path.Join(ctx.tempDir, "some_file")
+ cmd := &command{
+ Path: "touch",
+ Args: []string{tempFile},
+ }
+ if err := env.runWithTimeout(cmd, time.Second*120); err != nil {
+ t.Fatalf("Unexpected error touch'ing %q: %v", tempFile, err)
+ }
+
+ // This should be fine, since `touch` should've created the file.
+ if _, err := os.Stat(tempFile); err != nil {
+ t.Errorf("Stat'ing temp file at %q failed: %v", tempFile, err)
+ }
+ })
+}
+
+func TestRunWithTimeoutReturnsErrorOnTimeout(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ env, err := newProcessEnv()
+ if err != nil {
+ t.Fatalf("Unexpected error making new process env: %v", err)
+ }
+
+ cmd := &command{
+ Path: "sleep",
+ Args: []string{"30"},
+ }
+
+ err = env.runWithTimeout(cmd, 100*time.Millisecond)
+ if !errors.Is(err, context.DeadlineExceeded) {
+ t.Errorf("Expected context.DeadlineExceeded after `sleep` timed out; got error: %v", err)
+ }
+ })
+}
+
func TestNewProcessEnvResolvesPwdAwayProperly(t *testing.T) {
// This test cannot be t.Parallel(), since it modifies our environment.
const envPwd = "PWD"
diff --git a/compiler_wrapper/gcc_flags.go b/compiler_wrapper/gcc_flags.go
index 7141d14..2c553e6 100644
--- a/compiler_wrapper/gcc_flags.go
+++ b/compiler_wrapper/gcc_flags.go
@@ -11,11 +11,7 @@
// Conversion for flags supported by clang but not gcc.
clangToGcc := map[string]string{
- "-march=goldmont": "-march=silvermont",
- "-march=goldmont-plus": "-march=silvermont",
- "-march=skylake": "-march=corei7",
- "-march=tigerlake": "-march=corei7",
- "-march=tremont": "-march=silvermont",
+ "-march=alderlake": "-march=skylake",
}
builder.transformArgs(func(arg builderArg) string {
diff --git a/compiler_wrapper/gcc_flags_test.go b/compiler_wrapper/gcc_flags_test.go
index 8de07a4..adf7201 100644
--- a/compiler_wrapper/gcc_flags_test.go
+++ b/compiler_wrapper/gcc_flags_test.go
@@ -34,11 +34,7 @@
in string
out string
}{
- {"-march=goldmont", "-march=silvermont"},
- {"-march=goldmont-plus", "-march=silvermont"},
- {"-march=skylake", "-march=corei7"},
- {"-march=tigerlake", "-march=corei7"},
- {"-march=tremont", "-march=silvermont"},
+ {"-march=alderlake", "-march=skylake"},
}
for _, tt := range tests {
diff --git a/compiler_wrapper/go.mod b/compiler_wrapper/go.mod
new file mode 100644
index 0000000..ce6df01
--- /dev/null
+++ b/compiler_wrapper/go.mod
@@ -0,0 +1,3 @@
+module chromium.org/toolchain-utils/compiler_wrapper
+
+go 1.13
diff --git a/compiler_wrapper/gomacc_flag.go b/compiler_wrapper/gomacc_flag.go
deleted file mode 100644
index ac298b1..0000000
--- a/compiler_wrapper/gomacc_flag.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "os"
-)
-
-func processGomaCccFlags(builder *commandBuilder) (gomaUsed bool, err error) {
- gomaPath := ""
- nextArgIsGomaPath := false
- builder.transformArgs(func(arg builderArg) string {
- if arg.fromUser {
- if arg.value == "--gomacc-path" {
- nextArgIsGomaPath = true
- return ""
- }
- if nextArgIsGomaPath {
- gomaPath = arg.value
- nextArgIsGomaPath = false
- return ""
- }
- }
- return arg.value
- })
- if nextArgIsGomaPath {
- return false, newUserErrorf("--gomacc-path given without value")
- }
- if gomaPath == "" {
- gomaPath, _ = builder.env.getenv("GOMACC_PATH")
- }
- if gomaPath != "" {
- if _, err := os.Lstat(gomaPath); err == nil {
- builder.wrapPath(gomaPath)
- return true, nil
- }
- }
- return false, nil
-}
diff --git a/compiler_wrapper/gomacc_flag_test.go b/compiler_wrapper/gomacc_flag_test.go
deleted file mode 100644
index d7b2b0b..0000000
--- a/compiler_wrapper/gomacc_flag_test.go
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright 2019 The Chromium OS Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-package main
-
-import (
- "path"
- "testing"
-)
-
-func TestCallGomaccIfEnvIsGivenAndValid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- // Create a file so the gomacc path is valid.
- ctx.writeFile(gomaPath, "")
- ctx.env = []string{"GOMACC_PATH=" + gomaPath}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gomaPath); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitGomaccIfEnvIsGivenButInvalid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- // Note: This path does not point to a valid file.
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- ctx.env = []string{"GOMACC_PATH=" + gomaPath}
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestCallGomaccIfArgIsGivenAndValid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- // Create a file so the gomacc path is valid.
- ctx.writeFile(gomaPath, "")
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc, "--gomacc-path", gomaPath)))
- if err := verifyPath(cmd, gomaPath); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, "--gomacc-path"); err != nil {
- t.Error(err)
- }
- if err := verifyArgCount(cmd, 0, gomaPath); err != nil {
- t.Error(err)
- }
- if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitGomaccIfArgIsGivenButInvalid(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- // Note: This path does not point to a valid file.
- gomaPath := path.Join(ctx.tempDir, "gomacc")
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc, "--gomacc-path", gomaPath)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestErrorOnGomaccArgWithoutValue(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc, "--gomacc-path")))
- if err := verifyNonInternalError(stderr, "--gomacc-path given without value"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitGomaccByDefault(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, mainCc)))
- if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/install_compiler_wrapper.sh b/compiler_wrapper/install_compiler_wrapper.sh
index 479b112..3a5b741 100755
--- a/compiler_wrapper/install_compiler_wrapper.sh
+++ b/compiler_wrapper/install_compiler_wrapper.sh
@@ -19,7 +19,16 @@
echo "/usr/bin/clang_host_wrapper"
sudo cp ../binary_search_tool/bisect_driver.py /usr/bin
echo "/usr/bin/clang_host_wrapper/bisect_driver.py"
+
# Update the target wrappers
+./build.py --config=cros.hardened --use_ccache=false --use_llvm_next=false --output_file=./sysroot_wrapper.hardened.noccache
+./build.py --config=cros.hardened --use_ccache=true --use_llvm_next=false --output_file=./sysroot_wrapper.hardened.ccache
+# Update clang target wrappers.
+sudo cp ./sysroot_wrapper.hardened.noccache ./sysroot_wrapper.hardened.ccache /usr/bin
+echo "Updated clang wrapper /usr/bin/sysroot_wrapper.hardened.noccache"
+echo "Updated clang wrapper /usr/bin/sysroot_wrapper.hardened.ccache"
+
+# Update GCC target wrappers.
for GCC in cross-x86_64-cros-linux-gnu/gcc cross-armv7a-cros-linux-gnueabihf/gcc cross-aarch64-cros-linux-gnu/gcc; do
if ! FILES="$(equery f ${GCC})"; then
if [[ $(equery l "${GCC}" 2>&1 | wc -c) -eq 0 ]]; then
@@ -29,12 +38,12 @@
# Something went wrong, and the equery above probably complained about it.
exit 1
fi
- ./build.py --config=cros.hardened --use_ccache=false --use_llvm_next=false --output_file=./sysroot_wrapper.hardened.noccache
- sudo mv ./sysroot_wrapper.hardened.noccache "$(grep sysroot_wrapper.hardened.noccache <<< "${FILES}")"
+ echo "Updating ${GCC} wrapper."
+ sudo cp ./sysroot_wrapper.hardened.noccache "$(grep sysroot_wrapper.hardened.noccache <<< "${FILES}")"
grep sysroot_wrapper.hardened.noccache <<< "${FILES}"
- ./build.py --config=cros.hardened --use_ccache=true --use_llvm_next=false --output_file=./sysroot_wrapper.hardened.ccache
- sudo mv ./sysroot_wrapper.hardened.ccache "$(grep sysroot_wrapper.hardened.ccache <<< "${FILES}")"
+ sudo cp ./sysroot_wrapper.hardened.ccache "$(grep sysroot_wrapper.hardened.ccache <<< "${FILES}")"
grep sysroot_wrapper.hardened.ccache <<< "${FILES}"
sudo cp ../binary_search_tool/bisect_driver.py "$(grep bisect_driver.py <<< "${FILES}")"
grep bisect_driver.py <<< "${FILES}"
done
+rm -f ./sysroot_wrapper.hardened.noccache ./sysroot_wrapper.hardened.ccache
diff --git a/compiler_wrapper/kernel_bug.go b/compiler_wrapper/kernel_bug.go
new file mode 100644
index 0000000..55817cb
--- /dev/null
+++ b/compiler_wrapper/kernel_bug.go
@@ -0,0 +1,25 @@
+// Copyright 2021 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+package main
+
+import (
+ "bytes"
+ "strings"
+)
+
+// crbug.com/1166017
+
+const kernelBugRetryLimit = 25
+
+// GCC will sometimes fail to wait on subprocesses due to this kernel bug. It always fails the
+// compilation and prints "Unknown error 512" in that case.
+func containsTracesOfKernelBug(buf []byte) bool {
+ return bytes.Contains(buf, []byte("Unknown error 512"))
+}
+
+func errorContainsTracesOfKernelBug(err error) bool {
+ // We'll get errors that look like "waitid: errno 512." Presumably, this isn't specific to
+ // waitid, so just try to match the "errno 512" ending.
+ return err != nil && strings.HasSuffix(err.Error(), "errno 512")
+}
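
As an aside, the two detection helpers above are only half of the workaround: the wrapper retries the compilation (up to kernelBugRetryLimit attempts) when a failure looks like this kernel bug. A minimal, standalone sketch of such a detect-and-retry loop follows; runWithKernelBugRetries and its run callback are hypothetical illustrations, not the wrapper's actual API, and the two helpers are copied here only so the sketch compiles on its own.

package main

import (
	"bytes"
	"fmt"
	"strings"
)

const kernelBugRetryLimit = 25

// Copies of the helpers above, so this sketch is self-contained.
func containsTracesOfKernelBug(buf []byte) bool {
	return bytes.Contains(buf, []byte("Unknown error 512"))
}

func errorContainsTracesOfKernelBug(err error) bool {
	return err != nil && strings.HasSuffix(err.Error(), "errno 512")
}

// runWithKernelBugRetries re-runs the given command while its failure looks like
// the ERESTARTSYS kernel bug, giving up after kernelBugRetryLimit attempts.
func runWithKernelBugRetries(run func() (stderr []byte, err error)) error {
	var lastErr error
	for i := 0; i < kernelBugRetryLimit; i++ {
		stderr, err := run()
		if err == nil {
			return nil
		}
		if !errorContainsTracesOfKernelBug(err) && !containsTracesOfKernelBug(stderr) {
			// A genuine compile failure; retrying won't help.
			return err
		}
		lastErr = err
	}
	return fmt.Errorf("still hitting the kernel bug after %d attempts: %w", kernelBugRetryLimit, lastErr)
}

func main() {
	attempts := 0
	err := runWithKernelBugRetries(func() ([]byte, error) {
		attempts++
		if attempts < 3 {
			return []byte("fatal error: Unknown error 512"), fmt.Errorf("exit status 1")
		}
		return nil, nil
	})
	fmt.Println(attempts, err) // 3 <nil>
}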
diff --git a/compiler_wrapper/kernel_bug_test.go b/compiler_wrapper/kernel_bug_test.go
new file mode 100644
index 0000000..3c7bccf
--- /dev/null
+++ b/compiler_wrapper/kernel_bug_test.go
@@ -0,0 +1,74 @@
+// Copyright 2021 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+package main
+
+import (
+ "errors"
+ "io"
+ "testing"
+)
+
+func getErrorIndicatingKernelBug() error {
+ return errors.New("waitid: errno 512")
+}
+
+func TestWrapperRetriesCompilationsOnApparentKernelBugsSurfacedInGo(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
+ switch {
+ case ctx.cmdCount < kernelBugRetryLimit:
+ return getErrorIndicatingKernelBug()
+
+ case ctx.cmdCount == kernelBugRetryLimit:
+ return nil
+
+ default:
+ t.Fatalf("unexpected command: %#v", cmd)
+ return nil
+ }
+ }
+ ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
+ if ctx.cmdCount != kernelBugRetryLimit {
+ t.Errorf("expected %d retries. Got: %d", kernelBugRetryLimit, ctx.cmdCount)
+ }
+ })
+}
+
+func TestWrapperRetriesCompilationsOnApparentKernelBugsSurfacedInGCC(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
+ if ctx.cmdCount >= kernelBugRetryLimit {
+ return nil
+ }
+ _, err := io.WriteString(stderr, "fatal error: failed to get exit status: Unknown error 512")
+ if err != nil {
+ t.Fatalf("Failed writing to stdout: %v", err)
+ }
+ return newExitCodeError(1)
+ }
+ ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
+ if ctx.cmdCount != kernelBugRetryLimit {
+ t.Errorf("expected %d retries. Got: %d", kernelBugRetryLimit, ctx.cmdCount)
+ }
+ })
+}
+
+func TestWrapperOnlyRetriesCompilationAFiniteNumberOfTimes(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ kernelBugErr := getErrorIndicatingKernelBug()
+ ctx.cmdMock = func(cmd *command, stdin io.Reader, stdout io.Writer, stderr io.Writer) error {
+ if ctx.cmdCount > kernelBugRetryLimit {
+ t.Fatal("command count exceeded kernel bug retry limit; infinite loop?")
+ }
+ return kernelBugErr
+ }
+ stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
+ if err := verifyInternalError(stderr); err != nil {
+ t.Errorf("Internal error wasn't reported: %v", err)
+ }
+ if ctx.cmdCount != kernelBugRetryLimit {
+ t.Errorf("expected %d retries. Got: %d", kernelBugRetryLimit, ctx.cmdCount)
+ }
+ })
+}
diff --git a/compiler_wrapper/libc_exec.go b/compiler_wrapper/libc_exec.go
index d986773..a7a561b 100644
--- a/compiler_wrapper/libc_exec.go
+++ b/compiler_wrapper/libc_exec.go
@@ -18,7 +18,20 @@
// // Since fork() brings us to one thread, we can only use async-signal-safe funcs below.
// pid_t pid = fork();
// if (pid == 0) {
-// execve(pathname, argv, envp);
+// // crbug.com/1166017: we're (very rarely) getting ERESTARTSYS on some builders.
+// // Documentation indicates that this is a bug in the kernel. Work around it by
+// // retrying. 25 is an arbitrary retry number that Should Be Enough For Anyone(TM).
+// int i = 0;
+// for (; i < 25; i++) {
+// execve(pathname, argv, envp);
+// if (errno != 512) {
+// break;
+// }
+// // Sleep a bit. Not sure if this helps, but if the condition we're seeing is
+// // transient, it *hopefully* should. nanosleep isn't async-signal safe, so
+// // we have to live with sleep()
+// sleep(1);
+// }
// fprintf(stderr, "exec failed (errno: %d)\n", errno);
// _exit(1);
// }
diff --git a/compiler_wrapper/libgcc_flags.go b/compiler_wrapper/libgcc_flags.go
new file mode 100644
index 0000000..72fa838
--- /dev/null
+++ b/compiler_wrapper/libgcc_flags.go
@@ -0,0 +1,25 @@
+// Copyright 2021 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package main
+
+import (
+ "strings"
+)
+
+// Add "-static-libgcc" flag to compiler command line unless
+// already specified by user or user has passed "-shared-libgcc".
+func processLibGCCFlags(builder *commandBuilder) {
+ fromUser := false
+ for _, arg := range builder.args {
+ if arg.fromUser && (strings.HasPrefix(arg.value, "-shared-libgcc") ||
+ strings.HasPrefix(arg.value, "-static-libgcc")) {
+ fromUser = true
+ break
+ }
+ }
+ if !fromUser {
+ builder.addPreUserArgs("-static-libgcc")
+ }
+}
diff --git a/compiler_wrapper/libgcc_flags_test.go b/compiler_wrapper/libgcc_flags_test.go
new file mode 100644
index 0000000..717c0e5
--- /dev/null
+++ b/compiler_wrapper/libgcc_flags_test.go
@@ -0,0 +1,57 @@
+// Copyright 2021 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package main
+
+import (
+ "testing"
+)
+
+func TestDefaultStaticLibGCC(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ runWithCompiler := func(compiler string) {
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(compiler, mainCc)))
+ if err := verifyArgCount(cmd, 1, "-static-libgcc"); err != nil {
+ t.Error(err)
+ }
+ }
+
+ runWithCompiler(gccX86_64)
+ runWithCompiler(clangX86_64)
+ })
+}
+
+func TestKeepStaticLibGCCWithUserArgs(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ runWithCompiler := func(compiler string) {
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(compiler, "-static-libgcc", mainCc)))
+ if err := verifyArgOrder(cmd, "-static-libgcc", mainCc); err != nil {
+ t.Error(err)
+ }
+ }
+
+ runWithCompiler(gccX86_64)
+ runWithCompiler(clangX86_64)
+ })
+}
+
+func TestNoAddedStaticLibGCCWithSharedLibGCC(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ runWithCompiler := func(compiler string) {
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(compiler, "-shared-libgcc", mainCc)))
+ if err := verifyArgCount(cmd, 0, "-static-libgcc"); err != nil {
+ t.Error(err)
+ }
+ if err := verifyArgCount(cmd, 1, "-shared-libgcc"); err != nil {
+ t.Error(err)
+ }
+ }
+
+ runWithCompiler(gccX86_64)
+ runWithCompiler(clangX86_64)
+ })
+}
diff --git a/compiler_wrapper/remote_build_flag_test.go b/compiler_wrapper/remote_build_flag_test.go
new file mode 100644
index 0000000..4a89417
--- /dev/null
+++ b/compiler_wrapper/remote_build_flag_test.go
@@ -0,0 +1,251 @@
+// Copyright 2019 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package main
+
+import (
+ "os"
+ "path"
+ "reflect"
+ "testing"
+)
+
+func TestCommandlineFlagParsing(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ type testCase struct {
+ extraFlags []string
+ // If expectedError is nonempty, expectedValue is ignored. Otherwise, expectedValue
+ // has the expected value for the flag, and expectedExtraFlags has the expected
+ // (extra) flags left in the builder after filtering.
+ expectedError string
+ expectedValue string
+ expectedExtraFlags []string
+ }
+
+ const flagName = "--flag"
+ testCases := []testCase{
+ {
+ extraFlags: nil,
+ expectedError: errNoSuchCmdlineArg.Error(),
+ },
+ {
+ extraFlags: []string{flagName + "a"},
+ expectedError: errNoSuchCmdlineArg.Error(),
+ },
+ {
+ extraFlags: []string{flagName},
+ expectedError: "flag \"" + flagName + "\" requires a value",
+ },
+ {
+ extraFlags: []string{flagName, "foo"},
+ expectedValue: "foo",
+ expectedExtraFlags: nil,
+ },
+ {
+ extraFlags: []string{flagName + "=foo"},
+ expectedValue: "foo",
+ expectedExtraFlags: nil,
+ },
+ {
+ extraFlags: []string{flagName + "="},
+ expectedValue: "",
+ expectedExtraFlags: nil,
+ },
+ {
+ extraFlags: []string{flagName + "=foo", flagName + "=bar"},
+ expectedValue: "foo",
+ expectedExtraFlags: []string{flagName + "=bar"},
+ },
+ }
+
+ for _, testCase := range testCases {
+ cmd := ctx.newCommand(gccX86_64, testCase.extraFlags...)
+ builder, err := newCommandBuilder(ctx, ctx.cfg, cmd)
+ if err != nil {
+ t.Fatalf("Failed creating a command builder: %v", err)
+ }
+
+ flagValue, err := removeOneUserCmdlineFlagWithValue(builder, flagName)
+ if err != nil {
+ if testCase.expectedError == "" {
+ t.Errorf("given extra flags %q, got unexpected error removing %q: %v", testCase.extraFlags, flagName, err)
+ continue
+ }
+
+ if e := err.Error(); e != testCase.expectedError {
+ t.Errorf("given extra flags %q, got error %q; wanted %q", testCase.extraFlags, e, testCase.expectedError)
+ }
+ continue
+ }
+
+ if testCase.expectedError != "" {
+ t.Errorf("given extra flags %q, got no error, but expected %q", testCase.extraFlags, testCase.expectedError)
+ continue
+ }
+
+ if flagValue != testCase.expectedValue {
+ t.Errorf("given extra flags %q, got value %q, but expected %q", testCase.extraFlags, flagValue, testCase.expectedValue)
+ }
+
+ currentFlags := []string{}
+ // builder.args never includes the compiler itself, so collect every remaining flag value.
+ for _, a := range builder.args {
+ currentFlags = append(currentFlags, a.value)
+ }
+
+ sameFlags := (len(currentFlags) == 0 && len(testCase.expectedExtraFlags) == 0) || reflect.DeepEqual(currentFlags, testCase.expectedExtraFlags)
+ if !sameFlags {
+ t.Errorf("given extra flags %q, got post-removal flags %q, but expected %q", testCase.extraFlags, currentFlags, testCase.expectedExtraFlags)
+ }
+ }
+ })
+}
+
+func TestCallGomaccIfEnvIsGivenAndValid(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ ctx.env = []string{"GOMACC_PATH=" + gomaPath}
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc)))
+ if err := verifyPath(cmd, gomaPath); err != nil {
+ t.Error(err)
+ }
+ if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestOmitGomaccIfEnvIsGivenButInvalid(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ if err := os.Remove(gomaPath); err != nil {
+ t.Fatalf("failed removing fake goma file at %q: %v", gomaPath, err)
+ }
+
+ ctx.env = []string{"GOMACC_PATH=" + gomaPath}
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc)))
+ if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestCallGomaccIfArgIsGivenAndValid(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--gomacc-path", gomaPath)))
+ if err := verifyPath(cmd, gomaPath); err != nil {
+ t.Error(err)
+ }
+ if err := verifyArgCount(cmd, 0, "--gomacc-path"); err != nil {
+ t.Error(err)
+ }
+ if err := verifyArgCount(cmd, 0, gomaPath); err != nil {
+ t.Error(err)
+ }
+ if err := verifyArgOrder(cmd, gccX86_64+".real", mainCc); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestOmitGomaccIfArgIsGivenButInvalid(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ if err := os.Remove(gomaPath); err != nil {
+ t.Fatalf("failed removing fake goma file at %q: %v", gomaPath, err)
+ }
+
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--gomacc-path", gomaPath)))
+ if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestErrorOnGomaccArgWithoutValue(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--gomacc-path")))
+ if err := verifyNonInternalError(stderr, "flag \"--gomacc-path\" requires a value"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestOmitGomaccByDefault(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc)))
+ if err := verifyPath(cmd, gccX86_64+".real"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func withGomaccTestContext(t *testing.T, f func(*testContext, string)) {
+ withTestContext(t, func(ctx *testContext) {
+ gomaPath := path.Join(ctx.tempDir, "gomacc")
+ // Create a file so the gomacc path is valid.
+ ctx.writeFile(gomaPath, "")
+ f(ctx, gomaPath)
+ })
+}
+
+func TestRewrapperDefersToTheWrapperProperly(t *testing.T) {
+ withTestContext(t, func(ctx *testContext) {
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--rewrapper-path", "/rewrapper", "--rewrapper-cfg", "/some-cfg", "some", "other", "args")))
+ if err := verifyPath(cmd, "/rewrapper"); err != nil {
+ t.Error(err)
+ }
+ if err := verifyArgOrder(cmd, "-cfg", "/some-cfg", gccX86_64+".real", mainCc, "some", "other", "args"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestRewrapperCfgMustBePresentIfRewrapperPathIs(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--rewrapper-path", "/rewrapper")))
+ if err := verifyNonInternalError(stderr, "--rewrapper-cfg must be specified if --rewrapper-path is"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestRewrapperPathMustBePresentIfRewrapperCfgIs(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--rewrapper-cfg", "/some-cfg")))
+ if err := verifyNonInternalError(stderr, "--rewrapper-path must be specified if --rewrapper-cfg is"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestRewrapperAndGomaAreMutuallyExclusive(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ stderr := ctx.mustFail(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--rewrapper-path", "/rewrapper", "--rewrapper-cfg", "/some-cfg", "--gomacc-path", gomaPath)))
+ if err := verifyNonInternalError(stderr, "rewrapper and gomacc are mutually exclusive"); err != nil {
+ t.Error(err)
+ }
+ })
+}
+
+func TestRewrapperBlocksGomaInheritanceFromEnv(t *testing.T) {
+ withGomaccTestContext(t, func(ctx *testContext, gomaPath string) {
+ ctx.env = []string{"GOMACC_PATH=" + gomaPath}
+ cmd := ctx.must(callCompiler(ctx, ctx.cfg,
+ ctx.newCommand(gccX86_64, mainCc, "--rewrapper-path", "/rewrapper", "--rewrapper-cfg", "/some-cfg")))
+ if err := verifyPath(cmd, "/rewrapper"); err != nil {
+ t.Error(err)
+ }
+ if err := verifyArgOrder(cmd, "-cfg", "/some-cfg", gccX86_64+".real", mainCc); err != nil {
+ t.Error(err)
+ }
+ })
+}
diff --git a/compiler_wrapper/remote_build_flags.go b/compiler_wrapper/remote_build_flags.go
new file mode 100644
index 0000000..fc26c93
--- /dev/null
+++ b/compiler_wrapper/remote_build_flags.go
@@ -0,0 +1,160 @@
+// Copyright 2019 The Chromium OS Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package main
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "strings"
+)
+
+var errNoSuchCmdlineArg = errors.New("no such commandline argument")
+
+// Removes one flag from `builder`, assuming that a value follows the flag. Two formats are
+// supported for this: `--foo=bar` and `--foo bar`. In either case, "bar" will be returned as the
+// `value`.
+//
+// If no flag is found on the commandline, this returns the `errNoSuchCmdlineArg` error. `builder`
+// is unmodified if this error is returned, but its contents are unspecified if any other error is
+// returned.
+//
+// In the case of multiple such flags, only the first encountered will be removed.
+func removeOneUserCmdlineFlagWithValue(builder *commandBuilder, flagName string) (flagValue string, err error) {
+ const (
+ searchingForFlag uint8 = iota
+ searchingForValue
+ searchComplete
+ )
+
+ flagRequiresAValue := func() error { return newUserErrorf("flag %q requires a value", flagName) }
+ searchState := searchingForFlag
+ builder.transformArgs(func(arg builderArg) string {
+ if err != nil {
+ return arg.value
+ }
+
+ switch searchState {
+ case searchingForFlag:
+ if !arg.fromUser {
+ return arg.value
+ }
+
+ if arg.value == flagName {
+ searchState = searchingForValue
+ return ""
+ }
+
+ isArgEq := strings.HasPrefix(arg.value, flagName) && arg.value[len(flagName)] == '='
+ if !isArgEq {
+ return arg.value
+ }
+
+ flagValue = arg.value[len(flagName)+1:]
+ searchState = searchComplete
+ return ""
+
+ case searchingForValue:
+ if !arg.fromUser {
+ err = flagRequiresAValue()
+ return arg.value
+ }
+
+ flagValue = arg.value
+ searchState = searchComplete
+ return ""
+
+ case searchComplete:
+ return arg.value
+
+ default:
+ panic(fmt.Sprintf("unknown search state: %v", searchState))
+ }
+ })
+
+ if err != nil {
+ return "", err
+ }
+
+ switch searchState {
+ case searchingForFlag:
+ return "", errNoSuchCmdlineArg
+
+ case searchingForValue:
+ return "", flagRequiresAValue()
+
+ case searchComplete:
+ return flagValue, nil
+
+ default:
+ panic(fmt.Sprintf("unknown search state: %v", searchState))
+ }
+}
+
+func processGomaCccFlags(builder *commandBuilder, inheritFromEnv bool) (gomaUsed bool, err error) {
+ gomaPath, err := removeOneUserCmdlineFlagWithValue(builder, "--gomacc-path")
+ if err != nil && err != errNoSuchCmdlineArg {
+ return false, err
+ }
+
+ if inheritFromEnv && (err == errNoSuchCmdlineArg || gomaPath == "") {
+ gomaPath, _ = builder.env.getenv("GOMACC_PATH")
+ }
+
+ if gomaPath != "" {
+ if _, err := os.Lstat(gomaPath); err == nil {
+ builder.wrapPath(gomaPath)
+ return true, nil
+ }
+ }
+ return false, nil
+}
+
+func processRewrapperCcFlags(builder *commandBuilder) (rewrapperUsed bool, err error) {
+ rewrapperPath, pathErr := removeOneUserCmdlineFlagWithValue(builder, "--rewrapper-path")
+ if pathErr != nil && pathErr != errNoSuchCmdlineArg {
+ return false, pathErr
+ }
+
+ rewrapperCfg, cfgErr := removeOneUserCmdlineFlagWithValue(builder, "--rewrapper-cfg")
+ if cfgErr != nil && cfgErr != errNoSuchCmdlineArg {
+ return false, cfgErr
+ }
+
+ if pathErr == errNoSuchCmdlineArg {
+ if cfgErr != errNoSuchCmdlineArg {
+ return false, newUserErrorf("--rewrapper-path must be specified if --rewrapper-cfg is")
+ }
+ return false, nil
+ }
+
+ if cfgErr == errNoSuchCmdlineArg {
+ return false, newUserErrorf("--rewrapper-cfg must be specified if --rewrapper-path is")
+ }
+
+ // It's unclear that we should have a similar fallback to gomacc if --rewrapper-path doesn't
+ // exist, so don't until it's obviously necessary.
+ builder.wrapPath(rewrapperPath, "-cfg", rewrapperCfg)
+ return true, nil
+}
+
+func processRemoteBuildFlags(builder *commandBuilder) (remoteBuildUsed bool, err error) {
+ rewrapperUsed, err := processRewrapperCcFlags(builder)
+ if err != nil {
+ return rewrapperUsed, err
+ }
+
+ inheritGomaFromEnv := !rewrapperUsed
+ gomaUsed, err := processGomaCccFlags(builder, inheritGomaFromEnv)
+ remoteBuildUsed = gomaUsed || rewrapperUsed
+ if err != nil {
+ return remoteBuildUsed, err
+ }
+
+ if gomaUsed && rewrapperUsed {
+ return true, newUserErrorf("rewrapper and gomacc are mutually exclusive")
+ }
+ return remoteBuildUsed, nil
+}
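
To make the two accepted argument shapes concrete, here is a small standalone sketch of the same parsing rule over a plain []string; it is only an illustration of the `--flag value` and `--flag=value` handling described above, not the wrapper's commandBuilder-based implementation, and the paths used in main are made-up example values.

package main

import (
	"errors"
	"fmt"
	"strings"
)

var errNoSuchFlag = errors.New("no such commandline argument")

// removeFlagWithValue removes the first occurrence of flagName (given either as
// "flagName value" or "flagName=value") and returns its value plus the remaining args.
func removeFlagWithValue(args []string, flagName string) (value string, rest []string, err error) {
	for i, a := range args {
		switch {
		case a == flagName:
			if i+1 >= len(args) {
				return "", nil, fmt.Errorf("flag %q requires a value", flagName)
			}
			value = args[i+1]
			rest = append(append(rest, args[:i]...), args[i+2:]...)
			return value, rest, nil
		case strings.HasPrefix(a, flagName+"="):
			value = a[len(flagName)+1:]
			rest = append(append(rest, args[:i]...), args[i+1:]...)
			return value, rest, nil
		}
	}
	return "", args, errNoSuchFlag
}

func main() {
	// Example values only; any real gomacc path comes from the build environment.
	v, rest, _ := removeFlagWithValue([]string{"--gomacc-path", "/some/gomacc", "main.cc"}, "--gomacc-path")
	fmt.Println(v, rest) // /some/gomacc [main.cc]
}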
diff --git a/compiler_wrapper/reset_compiler_wrapper.sh b/compiler_wrapper/reset_compiler_wrapper.sh
new file mode 100755
index 0000000..523e972
--- /dev/null
+++ b/compiler_wrapper/reset_compiler_wrapper.sh
@@ -0,0 +1,42 @@
+#!/bin/bash -eux
+#
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If your compiler wrapper ends up broken, you can run this script to try to
+# restore it to a working version. We can only use artifacts we download from
+# gs://, since it's kind of hard to build a working compiler with a broken
+# compiler wrapper. ;)
+
+if [[ ! -e "/etc/cros_chroot_version" ]]; then
+ echo "Run me inside of the chroot."
+ exit 1
+fi
+
+packages_to_reemerge=(
+ # We want to reemerge the host wrapper...
+ sys-devel/llvm
+)
+
+gcc_wrappers=(
+ cross-x86_64-cros-linux-gnu/gcc
+ cross-armv7a-cros-linux-gnueabihf/gcc
+ cross-aarch64-cros-linux-gnu/gcc
+)
+
+# ...and any existing target wrappers.
+for gcc in "${gcc_wrappers[@]}"; do
+ # cheap check for whether or not the package in question is already installed
+ if ls /var/db/pkg/"${gcc}"-* >& /dev/null; then
+ packages_to_reemerge+=( "${gcc}" )
+ fi
+done
+
+# Ensure that we don't pick up any broken binpkgs for these when we install
+# them below.
+for pkg in "${packages_to_reemerge[@]}"; do
+ sudo rm -f "/var/lib/portage/pkgs/${pkg}"*
+done
+
+sudo emerge -j16 -G "${packages_to_reemerge[@]}"
diff --git a/compiler_wrapper/rusage_flag.go b/compiler_wrapper/rusage_flag.go
index 690308b..6346960 100644
--- a/compiler_wrapper/rusage_flag.go
+++ b/compiler_wrapper/rusage_flag.go
@@ -5,19 +5,23 @@
package main
import (
+ "encoding/json"
"fmt"
"os"
"path/filepath"
- "strings"
"syscall"
"time"
)
func getRusageLogFilename(env env) string {
- value, _ := env.getenv("GETRUSAGE")
+ value, _ := env.getenv("TOOLCHAIN_RUSAGE_OUTPUT")
return value
}
+func isRusageEnabled(env env) bool {
+ return getRusageLogFilename(env) != ""
+}
+
func lockFileExclusive(fd uintptr) error {
maxTries := 100
for i := 0; i < maxTries; i++ {
@@ -38,67 +42,119 @@
return fmt.Errorf("locking file failed after %d tries", maxTries)
}
-func logRusage(env env, logFileName string, compilerCmd *command) (exitCode int, err error) {
- rusageBefore := syscall.Rusage{}
- if err := syscall.Getrusage(syscall.RUSAGE_CHILDREN, &rusageBefore); err != nil {
- return 0, err
- }
- compilerCmdWithoutRusage := &command{
+type rusageLog struct {
+ ExitCode int `json:"exit_code"`
+ ElapsedRealTime float64 `json:"elapsed_real_time"`
+ ElapsedUserTime float64 `json:"elapsed_user_time"`
+ ElapsedSysTime float64 `json:"elapsed_sys_time"`
+ MaxMemUsed int64 `json:"max_mem_used"`
+ Compiler string `json:"compiler"`
+ CompilerArgs []string `json:"compiler_args"`
+ WorkingDirectory string `json:"working_directory"`
+}
+
+func removeRusageFromCommand(compilerCmd *command) *command {
+ return &command{
Path: compilerCmd.Path,
Args: compilerCmd.Args,
- EnvUpdates: append(compilerCmd.EnvUpdates, "GETRUSAGE="),
+ EnvUpdates: append(compilerCmd.EnvUpdates, "TOOLCHAIN_RUSAGE_OUTPUT="),
+ }
+}
+
+// maybeCaptureRusage captures rusage around the execution of action(), unless rusage
+// logging is disabled (i.e. the log file name from the environment is empty), in which
+// case action() is simply run. It returns a function that commits the collected rusage
+// data to the log file (a no-op when logging is disabled), and an error if action()
+// or one of the getrusage syscalls fails.
+func maybeCaptureRusage(env env, compilerCmd *command, action func(willLogRusage bool) error) (maybeCommitToFile func(exitCode int) error, err error) {
+ logFileName := getRusageLogFilename(env)
+ willLogRusage := isRusageEnabled(env)
+ if !willLogRusage {
+ if err := action(willLogRusage); err != nil {
+ return nil, err
+ }
+ return func(int) error { return nil }, nil
+ }
+
+ rusageBefore := syscall.Rusage{}
+ if err := syscall.Getrusage(syscall.RUSAGE_CHILDREN, &rusageBefore); err != nil {
+ return nil, fmt.Errorf("getting initial rusage: %v", err)
}
startTime := time.Now()
- exitCode, err = wrapSubprocessErrorWithSourceLoc(compilerCmdWithoutRusage,
- env.run(compilerCmdWithoutRusage, env.stdin(), env.stdout(), env.stderr()))
- if err != nil {
- return 0, err
+
+ if err := action(willLogRusage); err != nil {
+ return nil, err
}
+
elapsedRealTime := time.Since(startTime)
rusageAfter := syscall.Rusage{}
if err := syscall.Getrusage(syscall.RUSAGE_CHILDREN, &rusageAfter); err != nil {
- return 0, err
- }
- elapsedSysTime := time.Duration(rusageAfter.Stime.Nano()-rusageBefore.Stime.Nano()) * time.Nanosecond
- elapsedUserTime := time.Duration(rusageAfter.Utime.Nano()-rusageBefore.Utime.Nano()) * time.Nanosecond
- // Note: We assume that the compiler takes more heap than any other
- // subcommands that we might have executed before.
- maxMemUsed := rusageAfter.Maxrss
- absCompilerPath := getAbsCmdPath(env, compilerCmd)
-
- if err := os.MkdirAll(filepath.Dir(logFileName), 0777); err != nil {
- return 0, wrapErrorwithSourceLocf(err, "error creating rusage log directory %s", logFileName)
+ return nil, fmt.Errorf("getting final rusage: %v", err)
}
- timeUnit := float64(time.Second)
- data := fmt.Sprintf("%.5f : %.5f : %.5f : %d : %s : %s\n",
- float64(elapsedRealTime)/timeUnit, float64(elapsedUserTime)/timeUnit, float64(elapsedSysTime)/timeUnit,
- maxMemUsed, absCompilerPath,
- strings.Join(append([]string{filepath.Base(absCompilerPath)}, compilerCmd.Args...), " "))
+ return func(exitCode int) error {
+ elapsedSysTime := time.Duration(rusageAfter.Stime.Nano()-rusageBefore.Stime.Nano()) * time.Nanosecond
+ elapsedUserTime := time.Duration(rusageAfter.Utime.Nano()-rusageBefore.Utime.Nano()) * time.Nanosecond
+ // Note: We assume that the compiler takes more heap than any other
+ // subcommands that we might have executed before.
+ maxMemUsed := rusageAfter.Maxrss
+ absCompilerPath := getAbsCmdPath(env, compilerCmd)
- // Note: using file mode 0666 so that a root-created log is writable by others.
- logFile, err := os.OpenFile(logFileName, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0666)
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "creating rusage logfile %s", logFileName)
- }
+ // Temporarily set the umask to 0 so that the 0777 permissions requested below are
+ // actually applied as 0777. This affects builderbots in particular.
+ oldMask := env.umask(0)
+ defer env.umask(oldMask)
- // O_APPEND's atomicity guarantees are only for writes up to a certain size. If we don't
- // lock the file, we might end up with corrupted records.
- //
- // Note that Close()'ing the file releases all associated locks.
- if err := lockFileExclusive(logFile.Fd()); err != nil {
- _ = logFile.Close()
- return 0, wrapErrorwithSourceLocf(err, "locking rusage logfile %s: %v", logFileName, err)
- }
+ // We want to know what package is being compiled. The working directory gives us a good clue.
+ cwd, err := os.Getwd()
+ if err != nil {
+ return wrapErrorwithSourceLocf(err, "error getting working directory for rusage log")
+ }
- _, err = logFile.WriteString(data)
- closeErr := logFile.Close()
- if err != nil {
- return 0, wrapErrorwithSourceLocf(err, "writing to rusage logfile %s: %v", logFileName, err)
- }
- if closeErr != nil {
- return 0, wrapErrorwithSourceLocf(err, "closing rusage logfile %s: %v", logFileName, closeErr)
- }
+ if err := os.MkdirAll(filepath.Dir(logFileName), 0777); err != nil {
+ return wrapErrorwithSourceLocf(err, "error creating rusage log directory %s", logFileName)
+ }
- return exitCode, nil
+ timeUnit := float64(time.Second)
+
+ logEntry := rusageLog{
+ ExitCode: exitCode,
+ ElapsedRealTime: float64(elapsedRealTime) / timeUnit,
+ ElapsedUserTime: float64(elapsedUserTime) / timeUnit,
+ ElapsedSysTime: float64(elapsedSysTime) / timeUnit,
+ MaxMemUsed: maxMemUsed,
+ Compiler: absCompilerPath,
+ CompilerArgs: compilerCmd.Args,
+ WorkingDirectory: cwd,
+ }
+
+ // Note: using file mode 0666 so that a root-created log is writable by others.
+ logFile, err := os.OpenFile(logFileName, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0666)
+ if err != nil {
+ return wrapErrorwithSourceLocf(err, "creating rusage logfile %s", logFileName)
+ }
+
+ // O_APPEND's atomicity guarantees are only for writes up to a certain size. If we don't
+ // lock the file, we might end up with corrupted records.
+ //
+ // Note that Close()'ing the file releases all associated locks.
+ if err := lockFileExclusive(logFile.Fd()); err != nil {
+ _ = logFile.Close()
+ return wrapErrorwithSourceLocf(err, "locking rusage logfile %s: %v", logFileName, err)
+ }
+
+ if err := json.NewEncoder(logFile).Encode(logEntry); err != nil {
+ _ = logFile.Close()
+ return wrapErrorwithSourceLocf(err, "converting rusage logfile entry to JSON %v", logEntry)
+ }
+
+ if closeErr := logFile.Close(); closeErr != nil {
+ return wrapErrorwithSourceLocf(closeErr, "closing rusage logfile %s", logFileName)
+ }
+
+ return nil
+ }, nil
}
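
Because every wrapped invocation appends one JSON object (plus a trailing newline) to the file named by TOOLCHAIN_RUSAGE_OUTPUT, the log is effectively a stream of JSON values and can be read back with a json.Decoder. The following standalone sketch shows one way a consumer might do that; the rusageEntry struct simply mirrors the rusageLog fields above and is not part of the wrapper.

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"log"
	"os"
)

// rusageEntry mirrors the JSON schema written by the wrapper's rusageLog struct.
type rusageEntry struct {
	ExitCode         int      `json:"exit_code"`
	ElapsedRealTime  float64  `json:"elapsed_real_time"`
	ElapsedUserTime  float64  `json:"elapsed_user_time"`
	ElapsedSysTime   float64  `json:"elapsed_sys_time"`
	MaxMemUsed       int64    `json:"max_mem_used"`
	Compiler         string   `json:"compiler"`
	CompilerArgs     []string `json:"compiler_args"`
	WorkingDirectory string   `json:"working_directory"`
}

func main() {
	f, err := os.Open(os.Getenv("TOOLCHAIN_RUSAGE_OUTPUT"))
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	dec := json.NewDecoder(f)
	for {
		var e rusageEntry
		if err := dec.Decode(&e); err == io.EOF {
			break
		} else if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s exited %d after %.2fs (real) in %s\n",
			e.Compiler, e.ExitCode, e.ElapsedRealTime, e.WorkingDirectory)
	}
}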
diff --git a/compiler_wrapper/rusage_flag_test.go b/compiler_wrapper/rusage_flag_test.go
index bd0f980..439cfd1 100644
--- a/compiler_wrapper/rusage_flag_test.go
+++ b/compiler_wrapper/rusage_flag_test.go
@@ -5,6 +5,7 @@
package main
import (
+ "encoding/json"
"errors"
"fmt"
"io"
@@ -12,7 +13,6 @@
"os"
"path/filepath"
"regexp"
- "strconv"
"strings"
"testing"
)
@@ -73,7 +73,7 @@
func TestCreateDirAndFileForLogRusage(t *testing.T) {
withLogRusageTestContext(t, func(ctx *testContext) {
logFileName := filepath.Join(ctx.tempDir, "somedir", "rusage.log")
- ctx.env = []string{"GETRUSAGE=" + logFileName}
+ ctx.env = []string{"TOOLCHAIN_RUSAGE_OUTPUT=" + logFileName}
ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
if _, err := os.Stat(logFileName); err != nil {
@@ -85,37 +85,32 @@
func TestLogRusageFileContent(t *testing.T) {
withLogRusageTestContext(t, func(ctx *testContext) {
logFileName := filepath.Join(ctx.tempDir, "rusage.log")
- ctx.env = []string{"GETRUSAGE=" + logFileName}
+ ctx.env = []string{"TOOLCHAIN_RUSAGE_OUTPUT=" + logFileName}
ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
data, err := ioutil.ReadFile(logFileName)
if err != nil {
t.Errorf("could not read the rusage log file. Error: %s", err)
}
- // Example output:
- // 0.100318 : 0.103412 : 0.096386 : 6508 : /tmp/compiler_wrapper036306868/x86_64-cros-linux-gnu-gcc.real : x86_64-cros-linux-gnu-gcc.real --sysroot=/tmp/compiler_wrapper036306868/usr/x86_64-cros-linux-gnu main.cc -mno-movbe
- logParts := strings.Split(string(data), " : ")
- if len(logParts) != 6 {
- t.Errorf("unexpected number of rusage log parts. Got: %s", logParts)
+
+ rlog := rusageLog{}
+
+ if err := json.Unmarshal(data, &rlog); err != nil {
+ t.Fatalf("rusage log could not be unmarshalled. Got: %s", data)
}
- // First 3 numbers are times in seconds.
- for i := 0; i < 3; i++ {
- if _, err := strconv.ParseFloat(logParts[i], 64); err != nil {
- t.Errorf("unexpected value for index %d. Got: %s", i, logParts[i])
- }
+ if rlog.Compiler != filepath.Join(ctx.tempDir, gccX86_64+".real") {
+ t.Errorf("unexpected compiler path. Got: %s", rlog.Compiler)
}
- // Then an int for the memory usage
- if _, err := strconv.ParseInt(logParts[3], 10, 64); err != nil {
- t.Errorf("unexpected mem usage. Got: %s", logParts[3])
+ if matched, _ := regexp.MatchString("--sysroot=.*", rlog.CompilerArgs[0]); !matched {
+ t.Errorf("unexpected compiler args. Got: %s", rlog.CompilerArgs)
}
- // Then the full path of the compiler
- if logParts[4] != filepath.Join(ctx.tempDir, gccX86_64+".real") {
- t.Errorf("unexpected compiler path. Got: %s", logParts[4])
+ cwd, err := os.Getwd()
+ if err != nil {
+ t.Fatalf("Failed to get current working directory: %v", err)
}
- // Then the arguments, prefixes with the compiler basename
- if matched, _ := regexp.MatchString("x86_64-cros-linux-gnu-gcc.real --sysroot=.* main.cc", logParts[5]); !matched {
- t.Errorf("unexpected compiler args. Got: %s", logParts[5])
+ if rlog.WorkingDirectory != cwd {
+ t.Errorf("Unexpected working directory. Got: %q, Want: %q", rlog.WorkingDirectory, cwd)
}
})
}
@@ -123,7 +118,7 @@
func TestLogRusageAppendsToFile(t *testing.T) {
withLogRusageTestContext(t, func(ctx *testContext) {
logFileName := filepath.Join(ctx.tempDir, "rusage.log")
- ctx.env = []string{"GETRUSAGE=" + logFileName}
+ ctx.env = []string{"TOOLCHAIN_RUSAGE_OUTPUT=" + logFileName}
ctx.must(callCompiler(ctx, ctx.cfg, ctx.newCommand(gccX86_64, mainCc)))
data, err := ioutil.ReadFile(logFileName)
@@ -164,7 +159,9 @@
func withLogRusageTestContext(t *testing.T, work func(ctx *testContext)) {
withTestContext(t, func(ctx *testContext) {
- ctx.env = []string{"GETRUSAGE=" + filepath.Join(ctx.tempDir, "rusage.log")}
+ ctx.NoteTestWritesToUmask()
+
+ ctx.env = []string{"TOOLCHAIN_RUSAGE_OUTPUT=" + filepath.Join(ctx.tempDir, "rusage.log")}
work(ctx)
})
}
diff --git a/compiler_wrapper/sanitizer_flags.go b/compiler_wrapper/sanitizer_flags.go
index fe8d150..da0a64b 100644
--- a/compiler_wrapper/sanitizer_flags.go
+++ b/compiler_wrapper/sanitizer_flags.go
@@ -9,20 +9,13 @@
)
func processSanitizerFlags(builder *commandBuilder) {
- hasCoverageFlags := false
hasSanitizeFlags := false
- hasSanitizeFuzzerFlags := false
for _, arg := range builder.args {
// TODO: This should probably be -fsanitize= to not match on
- // e.g. -fsanitize-blacklist
+ // e.g. -fsanitize-blocklist
if arg.fromUser {
if strings.HasPrefix(arg.value, "-fsanitize") {
hasSanitizeFlags = true
- if strings.Contains(arg.value, "fuzzer") {
- hasSanitizeFuzzerFlags = true
- }
- } else if arg.value == "-fprofile-instr-generate" {
- hasCoverageFlags = true
}
}
}
@@ -44,15 +37,5 @@
}
return arg.value
})
- if builder.target.compilerType == clangType {
- // hasSanitizeFlags && hasCoverageFlags is to work around crbug.com/1013622
- if hasSanitizeFuzzerFlags || (hasSanitizeFlags && hasCoverageFlags) {
- fuzzerFlagsToAdd := []string{
- // TODO: This flag should be removed once fuzzer works with new pass manager
- "-fno-experimental-new-pass-manager",
- }
- builder.addPreUserArgs(fuzzerFlagsToAdd...)
- }
- }
}
}
diff --git a/compiler_wrapper/sanitizer_flags_test.go b/compiler_wrapper/sanitizer_flags_test.go
index 8f50a90..a401d58 100644
--- a/compiler_wrapper/sanitizer_flags_test.go
+++ b/compiler_wrapper/sanitizer_flags_test.go
@@ -98,55 +98,3 @@
}
})
}
-
-func TestAddFuzzerFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fsanitize=fuzzer", mainCc)))
- if err := verifyArgOrder(cmd, "-fno-experimental-new-pass-manager",
- "-fsanitize=fuzzer", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitFuzzerFlagsForGcc(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(gccX86_64, "-fsanitize=fuzzer", mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-experimental-new-pass-manager"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestAddSanitizerCoverageFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fsanitize=address", "-fprofile-instr-generate", mainCc)))
- if err := verifyArgOrder(cmd, "-fno-experimental-new-pass-manager",
- "-fsanitize=address", "-fprofile-instr-generate", mainCc); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestOmitSanitizerCoverageFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fsanitize=address", mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-experimental-new-pass-manager"); err != nil {
- t.Error(err)
- }
- })
-}
-
-func TestKeepSanitizerCoverageFlagsForClang(t *testing.T) {
- withTestContext(t, func(ctx *testContext) {
- cmd := ctx.must(callCompiler(ctx, ctx.cfg,
- ctx.newCommand(clangX86_64, "-fprofile-instr-generate", mainCc)))
- if err := verifyArgCount(cmd, 0, "-fno-experimental-new-pass-manager"); err != nil {
- t.Error(err)
- }
- })
-}
diff --git a/compiler_wrapper/sysroot_flag.go b/compiler_wrapper/sysroot_flag.go
index 501fb20..e0583b2 100644
--- a/compiler_wrapper/sysroot_flag.go
+++ b/compiler_wrapper/sysroot_flag.go
@@ -11,9 +11,14 @@
func processSysrootFlag(builder *commandBuilder) {
fromUser := false
+ userSysroot := ""
for _, arg := range builder.args {
if arg.fromUser && strings.HasPrefix(arg.value, "--sysroot=") {
fromUser = true
+ sysrootArg := strings.SplitN(arg.value, "=", 2)
+ if len(sysrootArg) == 2 {
+ userSysroot = sysrootArg[1]
+ }
break
}
}
@@ -27,5 +32,13 @@
}
if !fromUser {
builder.addPreUserArgs("--sysroot=" + sysroot)
+ } else {
+ sysroot = userSysroot
}
+
+ libdir := "-L" + sysroot + "/usr/lib"
+ if strings.Contains(builder.target.target, "64") {
+ libdir += "64"
+ }
+ builder.addPostUserArgs(libdir)
}
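
The lines added above mean the wrapper now always appends a library search path derived from the sysroot after the user's own arguments: -L<sysroot>/usr/lib, or -L<sysroot>/usr/lib64 when the target triple contains "64". A tiny standalone sketch of that selection rule follows (plain strings rather than the wrapper's commandBuilder; the sysroot paths in main are made-up examples).

package main

import (
	"fmt"
	"strings"
)

// sysrootLibDirFlag mirrors the libdir selection above: lib64 for 64-bit
// target triples, lib for everything else.
func sysrootLibDirFlag(sysroot, target string) string {
	libdir := "-L" + sysroot + "/usr/lib"
	if strings.Contains(target, "64") {
		libdir += "64"
	}
	return libdir
}

func main() {
	fmt.Println(sysrootLibDirFlag("/build/someboard", "x86_64-cros-linux-gnu"))        // -L/build/someboard/usr/lib64
	fmt.Println(sysrootLibDirFlag("/build/otherboard", "armv7a-cros-linux-gnueabihf")) // -L/build/otherboard/usr/lib
}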
diff --git a/compiler_wrapper/sysroot_flag_test.go b/compiler_wrapper/sysroot_flag_test.go
index 308d5e9..b05a627 100644
--- a/compiler_wrapper/sysroot_flag_test.go
+++ b/compiler_wrapper/sysroot_flag_test.go
@@ -57,7 +57,7 @@
func TestSetSysrootRelativeToWrapperPath(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.gccRootRelPath = "somepath"
cmd := ctx.must(callCompiler(ctx, ctx.cfg,
ctx.newCommand(gccX86_64, mainCc)))
if err := verifyArgOrder(cmd,
@@ -69,7 +69,7 @@
func TestSetSysrootRelativeToSymlinkedWrapperPath(t *testing.T) {
withTestContext(t, func(ctx *testContext) {
- ctx.cfg.rootRelPath = "somepath"
+ ctx.cfg.gccRootRelPath = "somepath"
linkedWrapperPath := path.Join(ctx.tempDir, "a/linked/path/x86_64-cros-linux-gnu-gcc")
ctx.symlink(path.Join(ctx.tempDir, gccX86_64), linkedWrapperPath)
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/bisect.json b/compiler_wrapper/testdata/cros_clang_host_golden/bisect.json
index 1c2e5a0..e237c7c 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/bisect.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/bisect.json
@@ -26,10 +26,9 @@
"/tmp/stable/clang",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -37,11 +36,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
],
"env_updates": [
"PYTHONPATH=/somepath/test_binary"
@@ -78,10 +81,9 @@
"/tmp/stable/clang",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -89,11 +91,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
],
"env_updates": [
"PYTHONPATH=/somepath/test_binary"
@@ -133,10 +139,9 @@
"/tmp/stable/clang",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -144,11 +149,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
],
"env_updates": [
"PYTHONPATH=/somepath/test_binary"
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json
index ec57b0d..07fccc6 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/clang_ftrapv_maincc_target_specific.json
@@ -17,10 +17,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -28,11 +27,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -56,10 +59,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -67,11 +69,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -95,10 +101,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -106,11 +111,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -134,10 +143,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -145,11 +153,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -173,10 +185,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -184,11 +195,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -212,10 +227,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -223,11 +237,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -251,10 +269,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -262,11 +279,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -290,10 +311,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -301,11 +321,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -329,10 +353,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -340,11 +363,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json
index 99c1272..a221605 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/clang_host_wrapper.json
@@ -16,10 +16,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -27,11 +26,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json
index 9389a4d..2130d52 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/clang_maincc_target_specific.json
@@ -16,10 +16,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -27,11 +26,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -54,10 +57,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -65,11 +67,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -92,10 +98,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -103,11 +108,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -130,10 +139,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -141,11 +149,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -168,10 +180,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -179,11 +190,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -206,10 +221,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -217,11 +231,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -244,10 +262,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -255,11 +272,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -282,10 +303,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -293,11 +313,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -320,10 +344,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -331,11 +354,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json
index 080f1d4..43ae728 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/clang_path.json
@@ -16,10 +16,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -27,11 +26,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -57,10 +60,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -68,11 +70,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
},
"stdout": "somemessage",
@@ -98,10 +104,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -109,11 +114,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -139,10 +148,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -150,11 +158,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -187,10 +199,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -198,13 +209,17 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -240,10 +255,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -251,13 +265,17 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -293,10 +311,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -304,13 +321,17 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
},
"stdout": "somemessage",
@@ -336,10 +357,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -347,11 +367,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -374,10 +398,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -385,11 +408,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -412,10 +439,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -423,11 +449,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -450,10 +480,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -461,11 +490,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -491,10 +524,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -502,11 +534,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json
index 0aeb236..b8b28cd 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/clang_sanitizer_args.json
@@ -18,10 +18,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -29,12 +28,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fsanitize=kernel-address",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -59,10 +62,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -70,12 +72,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fsanitize=kernel-address",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -100,10 +106,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -111,12 +116,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fsanitize=kernel-address",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -141,10 +150,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -152,12 +160,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fsanitize=kernel-address",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -181,10 +193,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -192,13 +203,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-fno-experimental-new-pass-manager",
+ "-fcommon",
"-fsanitize=fuzzer",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -223,10 +237,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -234,14 +247,17 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-fno-experimental-new-pass-manager",
+ "-fcommon",
"-fsanitize=address",
"-fprofile-instr-generate",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -265,10 +281,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -276,12 +291,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fsanitize=address",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -305,10 +324,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -316,12 +334,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fprofile-instr-generate",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json b/compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json
index bddb110..7c4afd3 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/clang_specific_args.json
@@ -6,7 +6,6 @@
"path": "./x86_64-cros-linux-gnu-clang",
"args": [
"-mno-movbe",
- "-pass-exit-codes",
"-Wclobbered",
"-Wno-psabi",
"-Wlogical-op",
@@ -27,10 +26,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -38,7 +36,9 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-mno-movbe",
"-Wclobbered",
"-Wno-psabi",
@@ -50,7 +50,9 @@
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -74,10 +76,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -85,12 +86,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-Wno-#warnings",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -114,10 +119,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -125,132 +129,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-Wno-error=uninitialized",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-fno-addrsig",
- "-fcommon",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-Wno-unknown-warning-option",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-Wno-error=unused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-fno-addrsig",
- "-fcommon",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-Wno-unknown-warning-option",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-Wno-unused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wunused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/tmp/stable/clang",
- "args": [
- "-Qunused-arguments",
- "-fno-addrsig",
- "-fcommon",
- "-fuse-ld=lld",
- "-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-Wno-unknown-warning-option",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-Wunused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -274,10 +162,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -285,12 +172,16 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-someflag",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json b/compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json
index 89e4c6f..f678ba6 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/clangtidy.json
@@ -32,10 +32,9 @@
"-resource-dir=someResourceDir",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -43,11 +42,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
},
@@ -57,10 +60,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -68,11 +70,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -112,10 +118,9 @@
"-resource-dir=someResourceDir",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -123,11 +128,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
},
@@ -137,10 +146,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -148,11 +156,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -194,10 +206,9 @@
"-resource-dir=someResourceDir",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -205,11 +216,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
},
"stdout": "somemessage",
@@ -222,10 +237,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -233,11 +247,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -280,10 +298,9 @@
"-resource-dir=someResourceDir",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -291,11 +308,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
},
@@ -305,10 +326,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -316,11 +336,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
},
"stdout": "somemessage",
diff --git a/compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json
index 75f5e47..f5a7771 100644
--- a/compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json
+++ b/compiler_wrapper/testdata/cros_clang_host_golden/force_disable_werror.json
@@ -19,10 +19,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -30,11 +29,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
}
}
@@ -60,10 +63,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -71,11 +73,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
},
"stderr": "-Werror originalerror",
@@ -87,10 +93,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -98,11 +103,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-Wno-error",
"-Wno-error=poison-system-directories"
]
@@ -132,10 +141,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -143,11 +151,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation"
+ "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable"
]
},
"stderr": "-Werror originalerror",
@@ -159,10 +171,9 @@
"args": [
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
"-fuse-ld=lld",
+ "-fdebug-default-version=5",
"-Wno-unused-local-typedefs",
- "-Wno-deprecated-declarations",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-final-dtor-non-final-class",
@@ -170,11 +181,15 @@
"-Wno-unknown-warning-option",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"main.cc",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-Wno-error",
"-Wno-error=poison-system-directories"
]
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json
index 62afbba..825e5dd 100644
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json
+++ b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_host_wrapper.json
@@ -17,6 +17,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json
index 6c88c34..97fb19b 100644
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_maincc_target_specific.json
@@ -17,6 +17,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -41,6 +42,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -65,6 +67,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -89,6 +92,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -113,6 +117,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -137,6 +142,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -161,6 +167,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -185,6 +192,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -209,6 +217,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json
index b846d47..c71bcd4 100644
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json
+++ b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_path.json
@@ -17,6 +17,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -44,6 +45,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
},
@@ -71,6 +73,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -95,6 +98,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -119,6 +123,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
@@ -146,6 +151,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"main.cc"
]
}
diff --git a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json
index e147047..cd65630 100644
--- a/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json
+++ b/compiler_wrapper/testdata/cros_gcc_host_golden/gcc_specific_args.json
@@ -18,6 +18,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"-march=goldmont",
"main.cc"
]
@@ -44,6 +45,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"-march=goldmont-plus",
"main.cc"
]
@@ -70,6 +72,7 @@
"-Wno-maybe-uninitialized",
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
+ "-fcommon",
"-march=skylake",
"main.cc"
]
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/bisect.json b/compiler_wrapper/testdata/cros_hardened_golden/bisect.json
index b8e0e46..05aea31 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/bisect.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/bisect.json
@@ -28,27 +28,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -94,27 +101,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -163,27 +177,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json
index 13e97dc..2b04d0c 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/clang_ftrapv_maincc_target_specific.json
@@ -19,28 +19,35 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -75,28 +82,35 @@
"--sysroot=/usr/x86_64-cros-eabi",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-eabi-",
"-ftrapv",
"main.cc",
+ "-L/usr/x86_64-cros-eabi/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -131,28 +145,35 @@
"--sysroot=/usr/x86_64-cros-win-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-win-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/x86_64-cros-win-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -187,28 +208,35 @@
"--sysroot=/usr/armv7m-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-linux-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv7m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-linux-gnu"
@@ -242,28 +270,35 @@
"--sysroot=/usr/armv7m-cros-eabi",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/armv7m-cros-eabi-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv7m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-eabi"
@@ -297,28 +332,35 @@
"--sysroot=/usr/armv7m-cros-win-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-win-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv7m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-win-gnu"
@@ -352,28 +394,35 @@
"--sysroot=/usr/armv8m-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-linux-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv8m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-linux-gnu"
@@ -407,28 +456,35 @@
"--sysroot=/usr/armv8m-cros-eabi",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/armv8m-cros-eabi-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv8m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-eabi"
@@ -462,28 +518,35 @@
"--sysroot=/usr/armv8m-cros-win-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-win-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv8m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-win-gnu"
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json
index b604692..ea83417 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/clang_maincc_target_specific.json
@@ -18,27 +18,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -72,27 +79,34 @@
"--sysroot=/usr/x86_64-cros-eabi",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-eabi-",
"main.cc",
+ "-L/usr/x86_64-cros-eabi/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -126,27 +140,34 @@
"--sysroot=/usr/x86_64-cros-win-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-win-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-win-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -180,27 +201,34 @@
"--sysroot=/usr/armv7m-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-linux-gnu-",
"main.cc",
+ "-L/usr/armv7m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-linux-gnu"
@@ -233,27 +261,34 @@
"--sysroot=/usr/armv7m-cros-eabi",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/armv7m-cros-eabi-",
"main.cc",
+ "-L/usr/armv7m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-eabi"
@@ -286,27 +321,34 @@
"--sysroot=/usr/armv7m-cros-win-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-win-gnu-",
"main.cc",
+ "-L/usr/armv7m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-win-gnu"
@@ -339,27 +381,34 @@
"--sysroot=/usr/armv8m-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-linux-gnu-",
"main.cc",
+ "-L/usr/armv8m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-linux-gnu"
@@ -392,27 +441,34 @@
"--sysroot=/usr/armv8m-cros-eabi",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/armv8m-cros-eabi-",
"main.cc",
+ "-L/usr/armv8m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-eabi"
@@ -445,27 +501,34 @@
"--sysroot=/usr/armv8m-cros-win-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-win-gnu-",
"main.cc",
+ "-L/usr/armv8m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-win-gnu"
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_path.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_path.json
index 9780356..52d4184 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_path.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/clang_path.json
@@ -18,27 +18,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -75,27 +82,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -132,27 +146,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -189,27 +210,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -253,29 +281,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -322,29 +357,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -386,29 +428,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -445,27 +494,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -495,33 +551,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "a/b/c/d/e/usr/bin/clang",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -549,33 +612,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "a/b/c/d/e/usr/bin/clang",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -603,33 +673,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "../usr/bin/clang",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -660,33 +737,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "/tmp/usr/bin/clang",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
],
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json
index b90ad65..b71a880 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/clang_sanitizer_args.json
@@ -20,27 +20,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -76,27 +83,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -132,27 +146,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -188,27 +209,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -243,28 +271,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-fno-omit-frame-pointer",
- "-fno-experimental-new-pass-manager",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=fuzzer",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -300,29 +334,35 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-fno-omit-frame-pointer",
- "-fno-experimental-new-pass-manager",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=address",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -357,27 +397,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -412,28 +459,35 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json
index 0d4aaf9..d10e6cd 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/clang_specific_args.json
@@ -6,7 +6,6 @@
"path": "./x86_64-cros-linux-gnu-clang",
"args": [
"-mno-movbe",
- "-pass-exit-codes",
"-Wclobbered",
"-Wno-psabi",
"-Wlogical-op",
@@ -29,23 +28,27 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-mno-movbe",
"-Wclobbered",
"-Wno-psabi",
@@ -55,9 +58,12 @@
"-Woverride-init",
"-Wunsafe-loop-optimizations",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -92,28 +98,35 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-Wno-#warnings",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -148,196 +161,35 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-Wno-error=uninitialized",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
- "-mno-movbe",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-fno-addrsig",
- "-fcommon",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wno-error=unused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation",
- "-mno-movbe",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-fno-addrsig",
- "-fcommon",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wno-unused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation",
- "-mno-movbe",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wunused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-fno-addrsig",
- "-fcommon",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-fuse-ld=lld",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-fstack-protector-strong",
- "-fPIE",
- "-pie",
- "-D_FORTIFY_SOURCE=2",
- "-fno-omit-frame-pointer",
- "-Wunused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -372,28 +224,35 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-someflag",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json
index 0ef5535..4220918 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/clang_sysroot_wrapper_common.json
@@ -19,12 +19,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -53,27 +56,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -110,27 +120,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -160,25 +177,32 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -213,26 +237,33 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fno-stack-protector",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-D__KERNEL__",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -267,26 +298,33 @@
"--sysroot=/usr/armv7a-cros-linux-gnueabihf",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
"-fno-stack-protector",
+ "--prefix=../../bin/armv7a-cros-linux-gnueabihf-",
"-D__KERNEL__",
"main.cc",
+ "-L/usr/armv7a-cros-linux-gnueabihf/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7a-cros-linux-gnueabihf"
@@ -319,28 +357,35 @@
"../../usr/bin/clang",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"--sysroot=xyz",
"main.cc",
+ "-Lxyz/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json b/compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json
index 06e114c..ea1363e 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/clangtidy.json
@@ -33,27 +33,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -68,27 +75,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -133,27 +147,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -169,27 +190,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -236,27 +264,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -275,27 +310,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -343,27 +385,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -379,27 +428,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json
index cd460a7..4df8157 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/force_disable_werror.json
@@ -21,27 +21,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -78,27 +85,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -121,27 +135,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -182,27 +203,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -225,27 +253,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json
index c7f619d..e2037e2 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/gcc_clang_syntax.json
@@ -18,27 +18,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -57,12 +64,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -96,27 +106,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -135,12 +152,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -169,27 +189,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -226,27 +253,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -265,12 +299,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json
index a45c95f..0cc3d8a 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/gcc_maincc_target_specific.json
@@ -19,12 +19,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -55,12 +58,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-eabi/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -91,12 +97,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-win-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -127,12 +136,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv7m-cros-linux-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -162,12 +174,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
- "main.cc"
+ "-static-libgcc",
+ "main.cc",
+ "-L/usr/armv7m-cros-eabi/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -197,12 +212,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv7m-cros-win-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -232,12 +250,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv8m-cros-linux-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -267,12 +288,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
- "main.cc"
+ "-static-libgcc",
+ "main.cc",
+ "-L/usr/armv8m-cros-eabi/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -302,12 +326,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv8m-cros-win-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json
index 36bf222..5b129d7 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/gcc_path.json
@@ -19,12 +19,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -58,12 +61,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -97,12 +103,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -133,12 +142,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -169,12 +181,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -208,12 +223,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json
index 3807d8a..a61ead8 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/gcc_sanitizer_args.json
@@ -21,13 +21,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -60,13 +63,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -99,13 +105,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -138,13 +147,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -176,13 +188,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fsanitize=fuzzer",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -215,14 +230,17 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fsanitize=address",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -254,13 +272,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fsanitize=address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -292,13 +313,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json
index e8a57b2..f7b2000 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/gcc_specific_args.json
@@ -20,13 +20,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
- "-march=silvermont",
+ "-static-libgcc",
+ "-march=goldmont",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -58,13 +61,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
- "-march=silvermont",
+ "-static-libgcc",
+ "-march=goldmont-plus",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -96,13 +102,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
- "-march=corei7",
+ "-static-libgcc",
+ "-march=skylake",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json
index d17707a..299b46b 100644
--- a/compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json
+++ b/compiler_wrapper/testdata/cros_hardened_golden/gcc_sysroot_wrapper_common.json
@@ -19,12 +19,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -54,12 +57,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -93,12 +99,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -126,10 +135,13 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -161,11 +173,14 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"-fno-stack-protector",
"-D__KERNEL__",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -197,11 +212,14 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-D_FORTIFY_SOURCE=2",
+ "-static-libgcc",
"-mthumb",
"-fno-stack-protector",
"-D__KERNEL__",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv7a-cros-linux-gnueabihf/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -231,13 +249,16 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"--sysroot=xyz",
"main.cc",
+ "-Lxyz/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json
index b8e0e46..05aea31 100644
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json
+++ b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/bisect.json
@@ -28,27 +28,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -94,27 +101,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -163,27 +177,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json
index 9780356..52d4184 100644
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json
+++ b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clang_path.json
@@ -18,27 +18,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -75,27 +82,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -132,27 +146,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -189,27 +210,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -253,29 +281,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -322,29 +357,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -386,29 +428,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -445,27 +494,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -495,33 +551,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "a/b/c/d/e/usr/bin/clang",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -549,33 +612,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "a/b/c/d/e/usr/bin/clang",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -603,33 +673,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "../usr/bin/clang",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -660,33 +737,40 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "/tmp/usr/bin/clang",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
],
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json
index 06e114c..ea1363e 100644
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json
+++ b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/clangtidy.json
@@ -33,27 +33,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -68,27 +75,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -133,27 +147,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -169,27 +190,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -236,27 +264,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -275,27 +310,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -343,27 +385,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -379,27 +428,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json
index cd460a7..4df8157 100644
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json
+++ b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/force_disable_werror.json
@@ -21,27 +21,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -78,27 +85,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -121,27 +135,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -182,27 +203,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -225,27 +253,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json
index c7f619d..e2037e2 100644
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json
+++ b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_clang_syntax.json
@@ -18,27 +18,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -57,12 +64,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -96,27 +106,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -135,12 +152,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -169,27 +189,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -226,27 +253,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -265,12 +299,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json
index 36bf222..5b129d7 100644
--- a/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json
+++ b/compiler_wrapper/testdata/cros_hardened_llvmnext_golden/gcc_path.json
@@ -19,12 +19,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -58,12 +61,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -97,12 +103,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -133,12 +142,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -169,12 +181,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -208,12 +223,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json
index b53bcc5..a676dc6 100644
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json
+++ b/compiler_wrapper/testdata/cros_hardened_noccache_golden/bisect.json
@@ -27,27 +27,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -89,27 +96,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -154,27 +168,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json
index 9176e9d..cc6d1ad 100644
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json
+++ b/compiler_wrapper/testdata/cros_hardened_noccache_golden/clang_path.json
@@ -17,27 +17,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -68,27 +75,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -119,27 +133,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -170,27 +191,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -228,29 +256,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -292,29 +327,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -355,29 +397,36 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -408,27 +457,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -451,34 +507,41 @@
"cmds": [
{
"cmd": {
- "path": "a/b/usr/bin/clang",
+ "path": "a/b/c/d/e/usr/bin/clang",
"args": [
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
]
@@ -499,34 +562,41 @@
"cmds": [
{
"cmd": {
- "path": "a/b/usr/bin/clang",
+ "path": "a/b/c/d/e/usr/bin/clang",
"args": [
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
]
@@ -547,34 +617,41 @@
"cmds": [
{
"cmd": {
- "path": "../../usr/bin/clang",
+ "path": "../usr/bin/clang",
"args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
]
@@ -598,34 +675,41 @@
"cmds": [
{
"cmd": {
- "path": "/usr/bin/clang",
+ "path": "/tmp/usr/bin/clang",
"args": [
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
]
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json
index 06e114c..ea1363e 100644
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json
+++ b/compiler_wrapper/testdata/cros_hardened_noccache_golden/clangtidy.json
@@ -33,27 +33,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -68,27 +75,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -133,27 +147,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -169,27 +190,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -236,27 +264,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -275,27 +310,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -343,27 +385,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -379,27 +428,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json
index a85e91e..2c34edb 100644
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json
+++ b/compiler_wrapper/testdata/cros_hardened_noccache_golden/force_disable_werror.json
@@ -20,27 +20,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -71,27 +78,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -108,27 +122,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -163,27 +184,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -200,27 +228,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json
index fe1e9b3..368eb85 100644
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json
+++ b/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_clang_syntax.json
@@ -18,27 +18,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -56,12 +63,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -91,27 +101,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -130,12 +147,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -164,27 +184,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -221,27 +248,34 @@
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
"-fno-addrsig",
- "-fcommon",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-fuse-ld=lld",
+ "--unwindlib=libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -259,12 +293,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
},
diff --git a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json b/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json
index 694b921..9393c4b 100644
--- a/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json
+++ b/compiler_wrapper/testdata/cros_hardened_noccache_golden/gcc_path.json
@@ -18,12 +18,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -52,12 +55,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
},
@@ -86,12 +92,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -117,12 +126,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -148,12 +160,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -182,12 +197,15 @@
"-fno-reorder-blocks-and-partition",
"-Wno-unused-local-typedefs",
"-Wno-maybe-uninitialized",
+ "-fcommon",
"-fstack-protector-strong",
"-fPIE",
"-pie",
"-D_FORTIFY_SOURCE=2",
"-fno-omit-frame-pointer",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json b/compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json
index b7e4de3..6c46fee 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/bisect.json
@@ -27,20 +27,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -85,20 +91,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -146,20 +158,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json
index a9e8b85..86960cc 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_ftrapv_maincc_target_specific.json
@@ -18,21 +18,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -66,21 +72,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-eabi",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-eabi-",
"-ftrapv",
"main.cc",
+ "-L/usr/x86_64-cros-eabi/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -114,21 +126,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-win-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-win-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/x86_64-cros-win-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -162,22 +180,28 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv7m-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-linux-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv7m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-linux-gnu"
@@ -210,21 +234,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv7m-cros-eabi",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/armv7m-cros-eabi-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv7m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-eabi"
@@ -257,22 +287,28 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv7m-cros-win-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-win-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv7m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-win-gnu"
@@ -305,22 +341,28 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv8m-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-linux-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv8m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-linux-gnu"
@@ -353,21 +395,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv8m-cros-eabi",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/armv8m-cros-eabi-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv8m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-eabi"
@@ -400,22 +448,28 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv8m-cros-win-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-win-gnu-",
"-ftrapv",
"main.cc",
+ "-L/usr/armv8m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-win-gnu"
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json
index bedbd6e..69af166 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_maincc_target_specific.json
@@ -17,20 +17,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -63,20 +69,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-eabi",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-eabi-",
"main.cc",
+ "-L/usr/x86_64-cros-eabi/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -109,20 +121,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-win-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-win-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-win-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -155,21 +173,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv7m-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-linux-gnu-",
"main.cc",
+ "-L/usr/armv7m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-linux-gnu"
@@ -201,20 +225,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv7m-cros-eabi",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/armv7m-cros-eabi-",
"main.cc",
+ "-L/usr/armv7m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-eabi"
@@ -246,21 +276,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv7m-cros-win-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv7m-cros-win-gnu-",
"main.cc",
+ "-L/usr/armv7m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7m-cros-win-gnu"
@@ -292,21 +328,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv8m-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-linux-gnu-",
"main.cc",
+ "-L/usr/armv8m-cros-linux-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-linux-gnu"
@@ -338,20 +380,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv8m-cros-eabi",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/armv8m-cros-eabi-",
"main.cc",
+ "-L/usr/armv8m-cros-eabi/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-eabi"
@@ -383,21 +431,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv8m-cros-win-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
+ "--prefix=../../bin/armv8m-cros-win-gnu-",
"main.cc",
+ "-L/usr/armv8m-cros-win-gnu/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv8m-cros-win-gnu"
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json
index edd40b7..afc7951 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_path.json
@@ -17,20 +17,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -66,20 +72,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -115,20 +127,26 @@
"../../usr/bin/clang++",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -164,20 +182,26 @@
"somepath/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -220,22 +244,28 @@
"/somedir/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -281,22 +311,28 @@
"/somedir/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -337,22 +373,28 @@
"/somedir/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-resource-dir=someResourceDir",
"--gcc-toolchain=/usr",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -388,20 +430,26 @@
"/usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -431,25 +479,31 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "a/b/c/d/e/usr/bin/clang",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -477,25 +531,31 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "a/b/usr/bin/clang",
- "--sysroot=/tmp/stable/a/b/usr/x86_64-cros-linux-gnu",
+ "a/b/c/d/e/usr/bin/clang",
+ "--sysroot=/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=a/b/c/d/e/bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/stable/a/b/c/d/e/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-Ba/b/bin",
+ "-Ba/b/c/d/e/bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -523,25 +583,31 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "../usr/bin/clang",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
],
@@ -572,25 +638,31 @@
"cmd": {
"path": "/usr/bin/ccache",
"args": [
- "/usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
+ "/tmp/usr/bin/clang",
+ "--sysroot=/tmp/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/tmp/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
- "-B../../bin",
+ "-B../bin",
"-target",
"x86_64-cros-linux-gnu"
],
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json
index e10a92b..3e140d5 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sanitizer_args.json
@@ -19,21 +19,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -68,21 +74,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -117,21 +129,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -166,21 +184,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -214,22 +238,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-fno-experimental-new-pass-manager",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=fuzzer",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -264,23 +293,28 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-fno-experimental-new-pass-manager",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=address",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -314,21 +348,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fsanitize=address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -362,21 +402,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json
index aafdaba..fbe038e 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_specific_args.json
@@ -6,7 +6,6 @@
"path": "./x86_64-cros-linux-gnu-clang",
"args": [
"-mno-movbe",
- "-pass-exit-codes",
"-Wclobbered",
"-Wno-psabi",
"-Wlogical-op",
@@ -28,16 +27,19 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-mno-movbe",
"-Wclobbered",
"-Wno-psabi",
@@ -47,9 +49,12 @@
"-Woverride-init",
"-Wunsafe-loop-optimizations",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -83,21 +88,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-Wno-#warnings",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -131,165 +142,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-Wno-error=uninitialized",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
- "-mno-movbe",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-error=unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-Wno-error=unused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation",
- "-mno-movbe",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wno-unused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-Wno-unused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation",
- "-mno-movbe",
- "-B../../bin",
- "-target",
- "x86_64-cros-linux-gnu"
- ],
- "env_updates": [
- "CCACHE_DIR=/var/cache/distfiles/ccache",
- "CCACHE_UMASK=002",
- "CCACHE_CPP2=yes"
- ]
- }
- }
- ]
- },
- {
- "wd": "/tmp/stable",
- "wrapper": {
- "cmd": {
- "path": "./x86_64-cros-linux-gnu-clang",
- "args": [
- "-Wunused-but-set-variable",
- "main.cc"
- ]
- }
- },
- "cmds": [
- {
- "cmd": {
- "path": "/usr/bin/ccache",
- "args": [
- "../../usr/bin/clang",
- "--sysroot=/usr/x86_64-cros-linux-gnu",
- "-Qunused-arguments",
- "-Wno-tautological-constant-compare",
- "-Wno-tautological-unsigned-enum-zero-compare",
- "-Wno-unknown-warning-option",
- "-Wno-section",
- "-static-libgcc",
- "-Wno-final-dtor-non-final-class",
- "-Werror=poison-system-directories",
- "-fexperimental-new-pass-manager",
- "-Wno-compound-token-split-by-macro",
- "-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
- "-Wunused-variable",
- "main.cc",
- "-Wno-implicit-int-float-conversion",
- "-Wno-compound-token-split-by-space",
- "-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -323,21 +196,27 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-someflag",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json
index e094f91..f2ccadb 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/clang_sysroot_wrapper_common.json
@@ -20,7 +20,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -48,20 +50,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -97,20 +105,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -139,20 +153,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -186,22 +206,28 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-fno-stack-protector",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"-D__KERNEL__",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -235,23 +261,29 @@
"../../usr/bin/clang",
"--sysroot=/usr/armv7a-cros-linux-gnueabihf",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
"-mthumb",
"-fno-stack-protector",
+ "--prefix=../../bin/armv7a-cros-linux-gnueabihf-",
"-D__KERNEL__",
"main.cc",
+ "-L/usr/armv7a-cros-linux-gnueabihf/usr/lib",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-B../../bin",
"-target",
"armv7a-cros-linux-gnueabihf"
@@ -283,21 +315,27 @@
"args": [
"../../usr/bin/clang",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"--sysroot=xyz",
"main.cc",
+ "-Lxyz/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json b/compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json
index 4e6ec63..dc641c9 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/clangtidy.json
@@ -32,20 +32,26 @@
"-resource-dir=someResourceDir",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -59,20 +65,26 @@
"args": [
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -116,20 +128,26 @@
"-resource-dir=someResourceDir",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -144,20 +162,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -203,20 +227,26 @@
"-resource-dir=someResourceDir",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -234,20 +264,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -294,20 +330,26 @@
"-resource-dir=someResourceDir",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -322,20 +364,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json b/compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json
index ef266ef..54b994c 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/force_disable_werror.json
@@ -20,20 +20,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -69,20 +75,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -104,20 +116,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -157,20 +175,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -192,20 +216,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json
index 9ea1c01..5234715 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_clang_syntax.json
@@ -17,20 +17,26 @@
"args": [
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -50,7 +56,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -83,20 +91,26 @@
"../../usr/bin/clang",
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -116,7 +130,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -144,20 +160,26 @@
"args": [
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -193,20 +215,26 @@
"args": [
"--sysroot=/usr/x86_64-cros-linux-gnu",
"-Qunused-arguments",
+ "-fdebug-default-version=5",
"-Wno-tautological-constant-compare",
"-Wno-tautological-unsigned-enum-zero-compare",
"-Wno-unknown-warning-option",
"-Wno-section",
- "-static-libgcc",
"-Wno-final-dtor-non-final-class",
"-Werror=poison-system-directories",
"-fexperimental-new-pass-manager",
"-Wno-compound-token-split-by-macro",
+ "-Wno-deprecated-declarations",
"-fcrash-diagnostics-dir=/tmp/stable/clang_crash_diagnostics",
+ "-static-libgcc",
+ "--prefix=../../bin/x86_64-cros-linux-gnu-",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-Wno-implicit-int-float-conversion",
"-Wno-compound-token-split-by-space",
"-Wno-string-concatenation",
+ "-Wno-deprecated-copy",
+ "-Wno-unused-but-set-variable",
"-mno-movbe",
"-B../../bin",
"-target",
@@ -226,7 +254,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json
index d11a741..24f90fb 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_maincc_target_specific.json
@@ -20,7 +20,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -52,7 +54,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-eabi/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -84,7 +88,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-win-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -116,8 +122,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv7m-cros-linux-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -148,7 +156,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
- "main.cc"
+ "-static-libgcc",
+ "main.cc",
+ "-L/usr/armv7m-cros-eabi/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -179,8 +189,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv7m-cros-win-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -211,8 +223,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv8m-cros-linux-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -243,7 +257,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
- "main.cc"
+ "-static-libgcc",
+ "main.cc",
+ "-L/usr/armv8m-cros-eabi/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -274,8 +290,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-mthumb",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv8m-cros-win-gnu/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json
index 17483ee..b266293 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_path.json
@@ -20,7 +20,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -55,7 +57,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -90,7 +94,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -122,7 +128,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -154,7 +162,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/tmp/stable/a/b/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -189,7 +199,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json
index 4aacc51..79e02c3 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sanitizer_args.json
@@ -22,8 +22,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -57,8 +59,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -92,8 +96,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -127,8 +133,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fsanitize=kernel-address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -161,8 +169,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fsanitize=fuzzer",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -196,9 +206,11 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fsanitize=address",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -231,8 +243,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fsanitize=address",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -265,8 +279,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fprofile-instr-generate",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json
index c2f7cd0..e0fe551 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_specific_args.json
@@ -21,8 +21,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
- "-march=silvermont",
+ "-static-libgcc",
+ "-march=goldmont",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -55,8 +57,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
- "-march=silvermont",
+ "-static-libgcc",
+ "-march=goldmont-plus",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -89,8 +93,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
- "-march=corei7",
+ "-static-libgcc",
+ "-march=skylake",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json
index be2df4c..ba2f292 100644
--- a/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json
+++ b/compiler_wrapper/testdata/cros_nonhardened_golden/gcc_sysroot_wrapper_common.json
@@ -20,7 +20,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -51,7 +53,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -86,7 +90,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
]
}
@@ -115,7 +121,9 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -148,9 +156,11 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-fno-stack-protector",
"-D__KERNEL__",
"main.cc",
+ "-L/usr/x86_64-cros-linux-gnu/usr/lib64",
"-mno-movbe"
],
"env_updates": [
@@ -183,10 +193,12 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"-mthumb",
"-fno-stack-protector",
"-D__KERNEL__",
- "main.cc"
+ "main.cc",
+ "-L/usr/armv7a-cros-linux-gnueabihf/usr/lib"
],
"env_updates": [
"CCACHE_DIR=/var/cache/distfiles/ccache",
@@ -217,8 +229,10 @@
"-Wno-unused-local-typedefs",
"-Wno-deprecated-declarations",
"-Wtrampolines",
+ "-static-libgcc",
"--sysroot=xyz",
"main.cc",
+ "-Lxyz/usr/lib64",
"-mno-movbe"
],
"env_updates": [
diff --git a/compiler_wrapper/testutil_test.go b/compiler_wrapper/testutil_test.go
index 21b7169..035f237 100644
--- a/compiler_wrapper/testutil_test.go
+++ b/compiler_wrapper/testutil_test.go
@@ -14,7 +14,10 @@
"path/filepath"
"regexp"
"strings"
+ "sync"
+ "syscall"
"testing"
+ "time"
)
const (
@@ -43,8 +46,15 @@
stdinBuffer bytes.Buffer
stdoutBuffer bytes.Buffer
stderrBuffer bytes.Buffer
+
+ umaskRestoreAction func()
}
+// We have some tests which modify our umask, and other tests which depend upon the value of our
+// umask remaining consistent. This lock serializes those. Please use `NoteTestWritesToUmask()` and
+// `NoteTestReadsFromUmask()` on `testContext` rather than using this directly.
+var umaskModificationLock sync.RWMutex
+
func withTestContext(t *testing.T, work func(ctx *testContext)) {
t.Parallel()
tempDir, err := ioutil.TempDir("", "compiler_wrapper")
@@ -62,11 +72,48 @@
}
ctx.updateConfig(&config{})
+ defer ctx.maybeReleaseUmaskDependency()
work(&ctx)
}
var _ env = (*testContext)(nil)
+func (ctx *testContext) umask(mask int) (oldmask int) {
+ if ctx.umaskRestoreAction == nil {
+ panic("Umask operations requested in test without declaring a umask dependency")
+ }
+ return syscall.Umask(mask)
+}
+
+func (ctx *testContext) initUmaskDependency(lockFn func(), unlockFn func()) {
+ if ctx.umaskRestoreAction != nil {
+ // Use a panic so we get a backtrace.
+ panic("Multiple notes of a test depending on the value of `umask` given -- tests " +
+ "are only allowed up to one.")
+ }
+
+ lockFn()
+ ctx.umaskRestoreAction = unlockFn
+}
+
+func (ctx *testContext) maybeReleaseUmaskDependency() {
+ if ctx.umaskRestoreAction != nil {
+ ctx.umaskRestoreAction()
+ }
+}
+
+// Note that the test depends on a stable value for the process' umask.
+func (ctx *testContext) NoteTestReadsFromUmask() {
+ ctx.initUmaskDependency(umaskModificationLock.RLock, umaskModificationLock.RUnlock)
+}
+
+// Note that the test modifies the process' umask. This implies a dependency on the process' umask,
+// so it's an error to call both NoteTestWritesToUmask and NoteTestReadsFromUmask from the same
+// test.
+func (ctx *testContext) NoteTestWritesToUmask() {
+ ctx.initUmaskDependency(umaskModificationLock.Lock, umaskModificationLock.Unlock)
+}
+
func (ctx *testContext) getenv(key string) (string, bool) {
for i := len(ctx.env) - 1; i >= 0; i-- {
entry := ctx.env[i]
@@ -114,6 +161,10 @@
return nil
}
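+// runWithTimeout ignores the requested duration in tests and simply delegates to exec.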
+func (ctx *testContext) runWithTimeout(cmd *command, duration time.Duration) error {
+ return ctx.exec(cmd)
+}
+
func (ctx *testContext) exec(cmd *command) error {
ctx.cmdCount++
ctx.lastCmd = cmd
diff --git a/cros_utils/bugs.py b/cros_utils/bugs.py
new file mode 100755
index 0000000..88fb767
--- /dev/null
+++ b/cros_utils/bugs.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities to file bugs."""
+
+import base64
+import datetime
+import enum
+import json
+import os
+from typing import Any, Dict, List, Optional
+
+X20_PATH = '/google/data/rw/teams/c-compiler-chrome/prod_bugs'
+
+
+class WellKnownComponents(enum.IntEnum):
+ """A listing of "well-known" components recognized by our infra."""
+ CrOSToolchainPublic = -1
+ CrOSToolchainPrivate = -2
+
+
+def _WriteBugJSONFile(object_type: str, json_object: Dict[str, Any]):
+ """Writes a JSON file to X20_PATH with the given bug-ish object."""
+ final_object = {
+ 'type': object_type,
+ 'value': json_object,
+ }
+
+ # The name of this has two parts:
+ # - An easily sortable time, to provide uniqueness and let our service send
+ # things in the order they were put into the outbox.
+ # - 64 bits of entropy, so two racing bug writes don't clobber the same file.
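+  # For example: '2021-06-01T12:34:56Z_8mQ2xCk5Vh0.json' (time, then entropy).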
+ now = datetime.datetime.utcnow().isoformat('T', 'seconds') + 'Z'
+ entropy = base64.urlsafe_b64encode(os.getrandom(8))
+ entropy_str = entropy.rstrip(b'=').decode('utf-8')
+ file_path = os.path.join(X20_PATH, f'{now}_{entropy_str}.json')
+
+ temp_path = file_path + '.in_progress'
+ try:
+ with open(temp_path, 'w') as f:
+ json.dump(final_object, f)
+ os.rename(temp_path, file_path)
+ except:
+ os.remove(temp_path)
+ raise
+ return file_path
+
+
+def AppendToExistingBug(bug_id: int, body: str):
+ """Sends a reply to an existing bug."""
+ _WriteBugJSONFile('AppendToExistingBugRequest', {
+ 'body': body,
+ 'bug_id': bug_id,
+ })
+
+
+def CreateNewBug(component_id: int,
+ title: str,
+ body: str,
+ assignee: Optional[str] = None,
+ cc: Optional[List[str]] = None):
+ """Sends a request to create a new bug.
+
+ Args:
+    component_id: The component ID to file the bug against. Anything from
+      WellKnownComponents also works.
+ title: Title of the bug. Must be nonempty.
+ body: Body of the bug. Must be nonempty.
+ assignee: Assignee of the bug. Must be either an email address, or a
+ "well-known" assignee (detective, mage).
+    cc: A list of emails to add to the CC list. Each must be either an email
+      address, or a "well-known" individual (detective, mage).
+ """
+ obj = {
+ 'component_id': component_id,
+ 'subject': title,
+ 'body': body,
+ }
+
+ if assignee:
+ obj['assignee'] = assignee
+
+ if cc:
+ obj['cc'] = cc
+
+ _WriteBugJSONFile('FileNewBugRequest', obj)
+
+
+def SendCronjobLog(cronjob_name: str, failed: bool, message: str):
+ """Sends the record of a cronjob to our bug infra.
+
+  Args:
+    cronjob_name: The name of the cronjob. Expected to remain consistent over
+      time.
+    failed: Whether the job failed or not.
+    message: Any seemingly relevant context. This is pasted verbatim in a bug,
+      if the cronjob infra deems it worthy.
+ """
+ _WriteBugJSONFile('ChrotomationCronjobUpdate', {
+ 'name': cronjob_name,
+ 'message': message,
+ 'failed': failed,
+ })
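A minimal usage sketch for the helpers above (not part of the change itself; the bug ID, titles, cronjob name, and CC address are illustrative placeholders, and the import assumes cros_utils is on PYTHONPATH):

from cros_utils import bugs

# File a new bug against the public toolchain component, CC'ing a teammate.
bugs.CreateNewBug(
    component_id=bugs.WellKnownComponents.CrOSToolchainPublic,
    title='Nightly AFDO update failed',
    body='See the attached log for details.',
    cc=['[email protected]'])

# Reply on an existing bug.
bugs.AppendToExistingBug(1234, 'Still reproduces after the revert.')

# Record a cronjob run; the bug infra decides whether anything gets filed.
bugs.SendCronjobLog('nightly_afdo_update', failed=False, message='All good.')

Each call only drops a JSON request file into X20_PATH, so it works only where that path is mounted and writable.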
diff --git a/cros_utils/bugs_test.py b/cros_utils/bugs_test.py
new file mode 100755
index 0000000..03dee64
--- /dev/null
+++ b/cros_utils/bugs_test.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# We're testing protected methods, so allow protected access.
+# pylint: disable=protected-access
+
+"""Tests bug filing bits."""
+
+import json
+import tempfile
+import unittest
+from unittest.mock import patch
+
+import bugs
+
+
+class Tests(unittest.TestCase):
+ """Tests for the bugs module."""
+ def testWritingJSONFileSeemsToWork(self):
+ """Tests JSON file writing."""
+ old_x20_path = bugs.X20_PATH
+
+ def restore_x20_path():
+ bugs.X20_PATH = old_x20_path
+
+ self.addCleanup(restore_x20_path)
+
+ with tempfile.TemporaryDirectory() as tempdir:
+ bugs.X20_PATH = tempdir
+ file_path = bugs._WriteBugJSONFile(
+ 'ObjectType', {
+ 'foo': 'bar',
+ 'baz': bugs.WellKnownComponents.CrOSToolchainPublic,
+ })
+
+ self.assertTrue(file_path.startswith(tempdir),
+ f'Expected {file_path} to start with {tempdir}')
+
+ with open(file_path) as f:
+ self.assertEqual(
+ json.load(f),
+ {
+ 'type': 'ObjectType',
+ 'value': {
+ 'foo': 'bar',
+ 'baz': int(bugs.WellKnownComponents.CrOSToolchainPublic),
+ },
+ },
+ )
+
+ @patch('bugs._WriteBugJSONFile')
+ def testAppendingToBugsSeemsToWork(self, mock_write_json_file):
+ """Tests AppendToExistingBug."""
+ bugs.AppendToExistingBug(1234, 'hello, world!')
+ mock_write_json_file.assert_called_once_with(
+ 'AppendToExistingBugRequest',
+ {
+ 'body': 'hello, world!',
+ 'bug_id': 1234,
+ },
+ )
+
+ @patch('bugs._WriteBugJSONFile')
+ def testBugCreationSeemsToWork(self, mock_write_json_file):
+ """Tests CreateNewBug."""
+ test_case_additions = (
+ {},
+ {
+ 'component_id': bugs.WellKnownComponents.CrOSToolchainPublic,
+ },
+ {
+ 'assignee': '[email protected]',
+ 'cc': ['[email protected]'],
+ },
+ )
+
+ for additions in test_case_additions:
+ test_case = {
+ 'component_id': 123,
+ 'title': 'foo',
+ 'body': 'bar',
+ **additions,
+ }
+
+ bugs.CreateNewBug(**test_case)
+
+ expected_output = {
+ 'component_id': test_case['component_id'],
+ 'subject': test_case['title'],
+ 'body': test_case['body'],
+ }
+
+ assignee = test_case.get('assignee')
+ if assignee:
+ expected_output['assignee'] = assignee
+
+ cc = test_case.get('cc')
+ if cc:
+ expected_output['cc'] = cc
+
+ mock_write_json_file.assert_called_once_with(
+ 'FileNewBugRequest',
+ expected_output,
+ )
+ mock_write_json_file.reset_mock()
+
+ @patch('bugs._WriteBugJSONFile')
+ def testCronjobLogSendingSeemsToWork(self, mock_write_json_file):
+ """Tests SendCronjobLog."""
+ bugs.SendCronjobLog('my_name', False, 'hello, world!')
+ mock_write_json_file.assert_called_once_with(
+ 'ChrotomationCronjobUpdate',
+ {
+ 'name': 'my_name',
+ 'message': 'hello, world!',
+ 'failed': False,
+ },
+ )
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/cros_utils/command_executer.py b/cros_utils/command_executer.py
index aeedf3e..cc0f337 100755
--- a/cros_utils/command_executer.py
+++ b/cros_utils/command_executer.py
@@ -103,14 +103,13 @@
p = None
try:
# pylint: disable=bad-option-value, subprocess-popen-preexec-fn
- p = subprocess.Popen(
- cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- shell=True,
- preexec_fn=os.setsid,
- executable='/bin/bash',
- env=env)
+ p = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=True,
+ preexec_fn=os.setsid,
+ executable='/bin/bash',
+ env=env)
full_stdout = ''
full_stderr = ''
@@ -159,16 +158,17 @@
if p.poll() is not None:
if terminated_time is None:
terminated_time = time.time()
- elif (terminated_timeout is not None and
- time.time() - terminated_time > terminated_timeout):
+ elif (terminated_timeout is not None
+ and time.time() - terminated_time > terminated_timeout):
if self.logger:
self.logger.LogWarning(
'Timeout of %s seconds reached since '
- 'process termination.' % terminated_timeout, print_to_console)
+ 'process termination.' % terminated_timeout,
+ print_to_console)
break
- if (command_timeout is not None and
- time.time() - started_time > command_timeout):
+ if (command_timeout is not None
+ and time.time() - started_time > command_timeout):
os.killpg(os.getpgid(p.pid), signal.SIGTERM)
if self.logger:
self.logger.LogWarning(
@@ -242,9 +242,11 @@
return command
def WriteToTempShFile(self, contents):
- with tempfile.NamedTemporaryFile(
- 'w', encoding='utf-8', delete=False, prefix=os.uname()[1],
- suffix='.sh') as f:
+ with tempfile.NamedTemporaryFile('w',
+ encoding='utf-8',
+ delete=False,
+ prefix=os.uname()[1],
+ suffix='.sh') as f:
f.write('#!/bin/bash\n')
f.write(contents)
f.flush()
@@ -292,16 +294,15 @@
machine, port = machine.split(':')
# Write all commands to a file.
command_file = self.WriteToTempShFile(cmd)
- retval = self.CopyFiles(
- command_file,
- command_file,
- dest_machine=machine,
- dest_port=port,
- command_terminator=command_terminator,
- chromeos_root=chromeos_root,
- dest_cros=True,
- recursive=False,
- print_to_console=print_to_console)
+ retval = self.CopyFiles(command_file,
+ command_file,
+ dest_machine=machine,
+ dest_port=port,
+ command_terminator=command_terminator,
+ chromeos_root=chromeos_root,
+ dest_cros=True,
+ recursive=False,
+ print_to_console=print_to_console)
if retval:
if self.logger:
self.logger.LogError('Could not run remote command on machine.'
@@ -311,13 +312,12 @@
command = self.RemoteAccessInitCommand(chromeos_root, machine, port)
command += '\nremote_sh bash %s' % command_file
command += '\nl_retval=$?; echo "$REMOTE_OUT"; exit $l_retval'
- retval = self.RunCommandGeneric(
- command,
- return_output,
- command_terminator=command_terminator,
- command_timeout=command_timeout,
- terminated_timeout=terminated_timeout,
- print_to_console=print_to_console)
+ retval = self.RunCommandGeneric(command,
+ return_output,
+ command_terminator=command_terminator,
+ command_timeout=command_timeout,
+ terminated_timeout=terminated_timeout,
+ print_to_console=print_to_console)
if return_output:
connect_signature = ('Initiating first contact with remote host\n' +
'Connection OK\n')
@@ -372,13 +372,13 @@
if self.logger:
self.logger.LogCmd(command, print_to_console=print_to_console)
- with tempfile.NamedTemporaryFile(
- 'w',
- encoding='utf-8',
- delete=False,
- dir=os.path.join(chromeos_root, 'src/scripts'),
- suffix='.sh',
- prefix='in_chroot_cmd') as f:
+ with tempfile.NamedTemporaryFile('w',
+ encoding='utf-8',
+ delete=False,
+ dir=os.path.join(chromeos_root,
+ 'src/scripts'),
+ suffix='.sh',
+ prefix='in_chroot_cmd') as f:
f.write('#!/bin/bash\n')
f.write(command)
f.write('\n')
@@ -393,7 +393,11 @@
if return_output:
ret = self.RunCommand(
'cd %s; cros_sdk %s -- true' % (chromeos_root, cros_sdk_options),
- env=env)
+ env=env,
+ # Give this command a long time to execute; it might involve setting
+ # the chroot up, or running fstrim on its image file. Both of these
+ # operations can take well over the timeout default of 10 seconds.
+ terminated_timeout=5 * 60)
if ret:
return (ret, '', '')
@@ -402,14 +406,13 @@
command = ("cd %s; cros_sdk %s -- bash -c '%s/%s'" %
(chromeos_root, cros_sdk_options, CHROMEOS_SCRIPTS_DIR,
os.path.basename(command_file)))
- ret = self.RunCommandGeneric(
- command,
- return_output,
- command_terminator=command_terminator,
- command_timeout=command_timeout,
- terminated_timeout=terminated_timeout,
- print_to_console=print_to_console,
- env=env)
+ ret = self.RunCommandGeneric(command,
+ return_output,
+ command_terminator=command_terminator,
+ command_timeout=command_timeout,
+ terminated_timeout=terminated_timeout,
+ print_to_console=print_to_console,
+ env=env)
os.remove(command_file)
return ret
@@ -445,11 +448,10 @@
username=None,
command_terminator=None):
cmd = ' ;\n'.join(cmdlist)
- return self.RunCommand(
- cmd,
- machine=machine,
- username=username,
- command_terminator=command_terminator)
+ return self.RunCommand(cmd,
+ machine=machine,
+ username=username,
+ command_terminator=command_terminator)
def CopyFiles(self,
src,
@@ -505,12 +507,11 @@
else:
command += rsync_prefix + 'root@%s:%s %s' % (cros_machine, src, dest)
- return self.RunCommand(
- command,
- machine=host_machine,
- username=host_user,
- command_terminator=command_terminator,
- print_to_console=print_to_console)
+ return self.RunCommand(command,
+ machine=host_machine,
+ username=host_user,
+ command_terminator=command_terminator,
+ print_to_console=print_to_console)
if dest_machine == src_machine:
command = 'rsync -a %s %s' % (src, dest)
@@ -519,12 +520,11 @@
src_machine = os.uname()[1]
src_user = getpass.getuser()
command = 'rsync -a %s@%s:%s %s' % (src_user, src_machine, src, dest)
- return self.RunCommand(
- command,
- machine=dest_machine,
- username=dest_user,
- command_terminator=command_terminator,
- print_to_console=print_to_console)
+ return self.RunCommand(command,
+ machine=dest_machine,
+ username=dest_user,
+ command_terminator=command_terminator,
+ print_to_console=print_to_console)
def RunCommand2(self,
cmd,
@@ -593,8 +593,9 @@
def notify_line(self):
p = self._buf.find('\n')
while p >= 0:
- self._line_consumer(
- line=self._buf[:p + 1], output=self._name, pobject=self._pobject)
+ self._line_consumer(line=self._buf[:p + 1],
+ output=self._name,
+ pobject=self._pobject)
if p < len(self._buf) - 1:
self._buf = self._buf[p + 1:]
p = self._buf.find('\n')
@@ -606,8 +607,9 @@
def notify_eos(self):
# Notify end of stream. The last line may not end with a '\n'.
if self._buf != '':
- self._line_consumer(
- line=self._buf, output=self._name, pobject=self._pobject)
+ self._line_consumer(line=self._buf,
+ output=self._name,
+ pobject=self._pobject)
self._buf = ''
if self.log_level == 'verbose':
diff --git a/cros_utils/email_sender.py b/cros_utils/email_sender.py
index 6b8893e..df8afbc 100755
--- a/cros_utils/email_sender.py
+++ b/cros_utils/email_sender.py
@@ -71,7 +71,7 @@
"From" email address. Must be nonempty.
well_known_recipients: a list of well-known recipients for the email.
These are translated into addresses by our mailer.
- Current potential values for this are ('sheriff',
+ Current potential values for this are ('detective',
'cwp-team', 'cros-team', 'mage'). Either this or
direct_recipients must be a nonempty list.
direct_recipients: @google.com emails to send addresses to. Either this
@@ -89,8 +89,8 @@
type(well_known_recipients))
if not isinstance(direct_recipients, (tuple, list)):
- raise ValueError(
- '`direct_recipients` is unexpectedly a %s' % type(direct_recipients))
+ raise ValueError('`direct_recipients` is unexpectedly a %s' %
+ type(direct_recipients))
if not subject or not identifier:
raise ValueError('both `subject` and `identifier` must be nonempty')
@@ -205,8 +205,8 @@
to_be_deleted = []
try:
- with tempfile.NamedTemporaryFile(
- 'w', encoding='utf-8', delete=False) as f:
+ with tempfile.NamedTemporaryFile('w', encoding='utf-8',
+ delete=False) as f:
f.write(text_to_send)
f.flush()
to_be_deleted.append(f.name)
@@ -239,8 +239,10 @@
report_suffix = '_report.html'
else:
report_suffix = '_report.txt'
- with tempfile.NamedTemporaryFile(
- 'w', encoding='utf-8', delete=False, suffix=report_suffix) as f:
+ with tempfile.NamedTemporaryFile('w',
+ encoding='utf-8',
+ delete=False,
+ suffix=report_suffix) as f:
f.write(attachment.content)
f.flush()
attachment_files.append(f.name)
diff --git a/cros_utils/email_sender_unittest.py b/cros_utils/email_sender_unittest.py
index 7349219..ae41f14 100755
--- a/cros_utils/email_sender_unittest.py
+++ b/cros_utils/email_sender_unittest.py
@@ -69,7 +69,7 @@
'subject': 'foo',
'identifier': 'foo',
# non-list recipients
- 'well_known_recipients': 'sheriff',
+ 'well_known_recipients': 'detective',
'text_body': 'hi',
},
]
@@ -89,8 +89,8 @@
def actual_write_file(file_path):
nonlocal written_obj
- self.assertTrue(
- file_path.startswith(email_sender.X20_PATH + '/'), file_path)
+ self.assertTrue(file_path.startswith(email_sender.X20_PATH + '/'),
+ file_path)
f = io.StringIO()
yield f
written_obj = json.loads(f.getvalue())
@@ -99,7 +99,7 @@
email_sender.EmailSender().SendX20Email(
subject='hello',
identifier='world',
- well_known_recipients=['sheriff'],
+ well_known_recipients=['detective'],
direct_recipients=['[email protected]'],
text_body='text',
html_body='html',
@@ -109,7 +109,7 @@
written_obj, {
'subject': 'hello',
'email_identifier': 'world',
- 'well_known_recipients': ['sheriff'],
+ 'well_known_recipients': ['detective'],
'direct_recipients': ['[email protected]'],
'body': 'text',
'html_body': 'html',
diff --git a/cros_utils/manifest_versions.py b/cros_utils/manifest_versions.py
deleted file mode 100644
index 4838de3..0000000
--- a/cros_utils/manifest_versions.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2013 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tools for searching/manipulating the manifests repository."""
-
-from __future__ import print_function
-
-__author__ = '[email protected] (Luis Lozano)'
-
-import copy
-import os
-import re
-import shutil
-import tempfile
-import time
-
-from cros_utils import command_executer
-from cros_utils import logger
-
-MANIFEST_VERSION_MAIN_BRANCH = 'master'
-
-
-def IsCrosVersion(version):
- match = re.search(r'(\d+\.\d+\.\d+\.\d+)', version)
- return match is not None
-
-
-def IsRFormatCrosVersion(version):
- match = re.search(r'(R\d+-\d+\.\d+\.\d+)', version)
- return match is not None
-
-
-def RFormatCrosVersion(version):
- assert IsCrosVersion(version)
- tmp_major, tmp_minor = version.split('.', 1)
- rformat = 'R' + tmp_major + '-' + tmp_minor
- assert IsRFormatCrosVersion(rformat)
- return rformat
-
-
-class ManifestVersions(object):
- """This class handles interactions with the manifests repo."""
-
- def __init__(self, internal=True):
- self.internal = internal
- self.clone_location = tempfile.mkdtemp()
- self.ce = command_executer.GetCommandExecuter()
- if internal:
- versions_git = ('https://chrome-internal.googlesource.com/'
- 'chromeos/manifest-versions.git')
- else:
- versions_git = (
- 'https://chromium.googlesource.com/chromiumos/manifest-versions.git')
- commands = [
- 'cd {0}'.format(self.clone_location),
- 'git clone {0}'.format(versions_git)
- ]
- ret = self.ce.RunCommands(commands)
- if ret:
- logger.GetLogger().LogFatal('Failed to clone manifest-versions.')
-
- def __del__(self):
- if self.clone_location:
- shutil.rmtree(self.clone_location)
-
- def TimeToVersionChromeOS(self, my_time):
- """Convert timestamp to version number, in ChromeOS/Paladin."""
- cur_time = time.mktime(time.gmtime())
- des_time = float(my_time)
- if cur_time - des_time > 7000000:
- logger.GetLogger().LogFatal('The time you specify is too early.')
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout -f $(git rev-list' +
- ' --max-count=1 --before={0} origin/{1})'.format(
- my_time, MANIFEST_VERSION_MAIN_BRANCH)
- ]
- ret = self.ce.RunCommands(commands)
- if ret:
- logger.GetLogger().LogFatal('Failed to checkout manifest at '
- 'specified time')
- path = os.path.realpath('{0}/manifest-versions/LKGM/lkgm.xml'.format(
- self.clone_location))
- pp = path.split('/')
- new_list = copy.deepcopy(pp)
- for i, e in enumerate(pp):
- if e == 'android-LKGM-candidates':
- new_list[i] = 'paladin'
- chrome_path = '/'.join(new_list)
- if not os.path.exists(chrome_path):
- logger.GetLogger().LogOutput('LKGM path is %s' % path)
- logger.GetLogger().LogOutput('Cannot find path %s' % chrome_path)
- pieces = os.path.basename(chrome_path).split('.')
- pieces = pieces[:-2]
- new_base = '.'.join(pieces) + '*'
- wild_path = os.path.join('/', '/'.join(new_list[:-1]), new_base)
- command = 'ls %s' % wild_path
- ret, out, _ = self.ce.RunCommandWOutput(command)
- if ret == 0:
- out = out.strip()
- files = out.split('\n')
- latest = files[-1]
- small = os.path.basename(latest).split('.xml')[0]
- version = pp[-2] + '.' + small
- else:
- small = os.path.basename(path).split('.xml')[0]
- version = pp[-2] + '.' + small
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout {0}'.format(MANIFEST_VERSION_MAIN_BRANCH)
- ]
- self.ce.RunCommands(commands)
- return version
-
- def TimeToVersion(self, my_time):
- """Convert timestamp to version number."""
- cur_time = time.mktime(time.gmtime())
- des_time = float(my_time)
- if cur_time - des_time > 7000000:
- logger.GetLogger().LogFatal('The time you specify is too early.')
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout -f $(git rev-list' +
- ' --max-count=1 --before={0} origin/{1})'.format(
- my_time, MANIFEST_VERSION_MAIN_BRANCH)
- ]
- ret = self.ce.RunCommands(commands)
- if ret:
- logger.GetLogger().LogFatal('Failed to checkout manifest at '
- 'specified time')
- path = os.path.realpath('{0}/manifest-versions/LKGM/lkgm.xml'.format(
- self.clone_location))
- pp = path.split('/')
- small = os.path.basename(path).split('.xml')[0]
- version = pp[-2] + '.' + small
- commands = [
- 'cd {0}'.format(self.clone_location), 'cd manifest-versions',
- 'git checkout {0}'.format(MANIFEST_VERSION_MAIN_BRANCH)
- ]
- self.ce.RunCommands(commands)
- return version
-
- def GetManifest(self, version, to_file):
- """Get the manifest file from a given chromeos-internal version."""
- assert not IsRFormatCrosVersion(version)
- version = version.split('.', 1)[1]
- os.chdir(self.clone_location)
- files = [
- os.path.join(r, f)
- for r, _, fs in os.walk('.')
- for f in fs
- if version in f
- ]
- if files:
- command = 'cp {0} {1}'.format(files[0], to_file)
- ret = self.ce.RunCommand(command)
- if ret:
- raise RuntimeError('Cannot copy manifest to {0}'.format(to_file))
- else:
- raise RuntimeError('Version {0} is not available.'.format(version))
diff --git a/cros_utils/misc.py b/cros_utils/misc.py
index 93d1b3b..a0d0de7 100644
--- a/cros_utils/misc.py
+++ b/cros_utils/misc.py
@@ -15,7 +15,6 @@
import re
import shutil
import sys
-import traceback
from cros_utils import command_executer
from cros_utils import logger
@@ -24,8 +23,6 @@
TOOLCHAIN_UTILS_PATH = ('/mnt/host/source/src/third_party/toolchain-utils/'
'cros_utils/toolchain_utils.sh')
-CROS_MAIN_BRANCH = 'cros/master'
-
def GetChromeOSVersionFromLSBVersion(lsb_version):
"""Get Chromeos version from Lsb version."""
@@ -112,8 +109,8 @@
def FormatCommands(commands):
- return ApplySubs(
- str(commands), ('&&', '&&\n'), (';', ';\n'), (r'\n+\s*', '\n'))
+ return ApplySubs(str(commands), ('&&', '&&\n'), (';', ';\n'),
+ (r'\n+\s*', '\n'))
def GetImageDir(chromeos_root, board):
@@ -155,16 +152,16 @@
withdebug_flag = '--nowithdebug'
return ('%s/build_packages %s --withdev --withtest --withautotest '
'--skip_toolchain_update %s --board=%s '
- '--accept_licenses=@CHROMEOS' % (CHROMEOS_SCRIPTS_DIR, usepkg_flag,
- withdebug_flag, board))
+ '--accept_licenses=@CHROMEOS' %
+ (CHROMEOS_SCRIPTS_DIR, usepkg_flag, withdebug_flag, board))
def GetBuildImageCommand(board, dev=False):
dev_args = ''
if dev:
dev_args = '--noenable_rootfs_verification --disk_layout=2gb-rootfs'
- return ('%s/build_image --board=%s %s test' % (CHROMEOS_SCRIPTS_DIR, board,
- dev_args))
+ return ('%s/build_image --board=%s %s test' %
+ (CHROMEOS_SCRIPTS_DIR, board, dev_args))
def GetSetupBoardCommand(board, usepkg=None, force=None):
@@ -193,8 +190,8 @@
def GetCtargetFromBoard(board, chromeos_root):
"""Get Ctarget from board."""
base_board = board.split('_')[0]
- command = ('source %s; get_ctarget_from_board %s' % (TOOLCHAIN_UTILS_PATH,
- base_board))
+ command = ('source %s; get_ctarget_from_board %s' %
+ (TOOLCHAIN_UTILS_PATH, base_board))
ce = command_executer.GetCommandExecuter()
ret, out, _ = ce.ChrootRunCommandWOutput(chromeos_root, command)
if ret != 0:
@@ -207,8 +204,8 @@
def GetArchFromBoard(board, chromeos_root):
"""Get Arch from board."""
base_board = board.split('_')[0]
- command = (
- 'source %s; get_board_arch %s' % (TOOLCHAIN_UTILS_PATH, base_board))
+ command = ('source %s; get_board_arch %s' %
+ (TOOLCHAIN_UTILS_PATH, base_board))
ce = command_executer.GetCommandExecuter()
ret, out, _ = ce.ChrootRunCommandWOutput(chromeos_root, command)
if ret != 0:
@@ -316,23 +313,22 @@
def HasGitStagedChanges(git_dir):
"""Return True if git repository has staged changes."""
- command = 'cd {0} && git diff --quiet --cached --exit-code HEAD'.format(
- git_dir)
+ command = f'cd {git_dir} && git diff --quiet --cached --exit-code HEAD'
return command_executer.GetCommandExecuter().RunCommand(
command, print_to_console=False)
def HasGitUnstagedChanges(git_dir):
"""Return True if git repository has un-staged changes."""
- command = 'cd {0} && git diff --quiet --exit-code HEAD'.format(git_dir)
+ command = f'cd {git_dir} && git diff --quiet --exit-code HEAD'
return command_executer.GetCommandExecuter().RunCommand(
command, print_to_console=False)
def HasGitUntrackedChanges(git_dir):
"""Return True if git repository has un-tracked changes."""
- command = ('cd {0} && test -z '
- '$(git ls-files --exclude-standard --others)').format(git_dir)
+ command = (f'cd {git_dir} && test -z '
+ '$(git ls-files --exclude-standard --others)')
return command_executer.GetCommandExecuter().RunCommand(
command, print_to_console=False)
@@ -352,8 +348,8 @@
The git hash for the symbolic name or None if fails.
"""
- command = ('cd {0} && git log -n 1 --pretty="format:%H" {1}').format(
- git_dir, commit_symbolic_name)
+ command = (f'cd {git_dir} && git log -n 1'
+ f' --pretty="format:%H" {commit_symbolic_name}')
rv, out, _ = command_executer.GetCommandExecuter().RunCommandWOutput(
command, print_to_console=False)
if rv == 0:
@@ -393,7 +389,7 @@
Returns:
A list containing all the changed files.
"""
- command = 'cd {0} && git diff --name-only'.format(git_dir)
+ command = f'cd {git_dir} && git diff --name-only'
if staged:
command += ' --cached'
if path:
@@ -408,8 +404,8 @@
def IsChromeOsTree(chromeos_root):
return (os.path.isdir(
- os.path.join(chromeos_root, 'src/third_party/chromiumos-overlay')) and
- os.path.isdir(os.path.join(chromeos_root, 'manifest')))
+ os.path.join(chromeos_root, 'src/third_party/chromiumos-overlay'))
+ and os.path.isdir(os.path.join(chromeos_root, 'manifest')))
def DeleteChromeOsTree(chromeos_root, dry_run=False):
@@ -423,11 +419,10 @@
True if everything is ok.
"""
if not IsChromeOsTree(chromeos_root):
- logger.GetLogger().LogWarning(
- '"{0}" does not seem to be a valid chromeos tree, do nothing.'.format(
- chromeos_root))
+ logger.GetLogger().LogWarning(f'"{chromeos_root}" does not seem to be a'
+ ' valid chromeos tree, do nothing.')
return False
- cmd0 = 'cd {0} && cros_sdk --delete'.format(chromeos_root)
+ cmd0 = f'cd {chromeos_root} && cros_sdk --delete'
if dry_run:
print(cmd0)
else:
@@ -435,10 +430,10 @@
cmd0, print_to_console=True) != 0:
return False
- cmd1 = ('export CHROMEOSDIRNAME="$(dirname $(cd {0} && pwd))" && '
- 'export CHROMEOSBASENAME="$(basename $(cd {0} && pwd))" && '
- 'cd $CHROMEOSDIRNAME && sudo rm -fr $CHROMEOSBASENAME'
- ).format(chromeos_root)
+ cmd1 = (
+ f'export CHROMEOSDIRNAME="$(dirname $(cd {chromeos_root} && pwd))" && '
+ f'export CHROMEOSBASENAME="$(basename $(cd {chromeos_root} && pwd))" && '
+ 'cd $CHROMEOSDIRNAME && sudo rm -fr $CHROMEOSBASENAME')
if dry_run:
print(cmd1)
return True
@@ -447,54 +442,6 @@
cmd1, print_to_console=True) == 0
-def ApplyGerritPatches(chromeos_root,
- gerrit_patch_string,
- branch=CROS_MAIN_BRANCH):
- """Apply gerrit patches on a chromeos tree.
-
- Args:
- chromeos_root: chromeos tree path
- gerrit_patch_string: a patch string just like the one gives to cbuildbot,
- 'id1 id2 *id3 ... idn'. A prefix of '* means this is an internal patch.
- branch: the tree based on which to apply the patches.
-
- Returns:
- True if success.
- """
-
- ### First of all, we need chromite libs
- sys.path.append(os.path.join(chromeos_root, 'chromite'))
- # Imports below are ok after modifying path to add chromite.
- # Pylint cannot detect that and complains.
- # pylint: disable=import-error, import-outside-toplevel
- from lib import git
- from lib import gerrit
- manifest = git.ManifestCheckout(chromeos_root)
- patch_list = gerrit_patch_string.split(' ')
- ### This takes time, print log information.
- logger.GetLogger().LogOutput('Retrieving patch information from server ...')
- patch_info_list = gerrit.GetGerritPatchInfo(patch_list)
- for pi in patch_info_list:
- project_checkout = manifest.FindCheckout(pi.project, strict=False)
- if not project_checkout:
- logger.GetLogger().LogError(
- 'Failed to find patch project "{project}" in manifest.'.format(
- project=pi.project))
- return False
-
- pi_str = '{project}:{ref}'.format(project=pi.project, ref=pi.ref)
- try:
- project_git_path = project_checkout.GetPath(absolute=True)
- logger.GetLogger().LogOutput('Applying patch "{0}" in "{1}" ...'.format(
- pi_str, project_git_path))
- pi.Apply(project_git_path, branch, trivial=False)
- except Exception:
- traceback.print_exc(file=sys.stdout)
- logger.GetLogger().LogError('Failed to apply patch "{0}"'.format(pi_str))
- return False
- return True
-
-
def BooleanPrompt(prompt='Do you want to continue?',
default=True,
true_value='yes',
@@ -515,8 +462,8 @@
true_value, false_value = true_value.lower(), false_value.lower()
true_text, false_text = true_value, false_value
if true_value == false_value:
- raise ValueError(
- 'true_value and false_value must differ: got %r' % true_value)
+ raise ValueError('true_value and false_value must differ: got %r' %
+ true_value)
if default:
true_text = true_text[0].upper() + true_text[1:]
diff --git a/crosperf/experiment_files/README.md b/crosperf/README.md
similarity index 78%
rename from crosperf/experiment_files/README.md
rename to crosperf/README.md
index 8c1fe20..18601b6 100644
--- a/crosperf/experiment_files/README.md
+++ b/crosperf/README.md
@@ -1,4 +1,4 @@
-# Experiment files
+# experiment_files
To use these experiment files, replace the board, remote and images
placeholders and run crosperf on them.
@@ -37,3 +37,9 @@
iterations: 1
}
```
+
+# default_remotes
+
+This is the list of machines allocated for the toolchain team.
+This should be kept in sync with:
+https://chromeos-swarming.appspot.com/botlist?c=id&c=task&c=label-board&c=label-pool&c=os&c=status&d=asc&f=label-pool%3Atoolchain&k=label-pool&s=id
diff --git a/crosperf/benchmark_run_unittest.py b/crosperf/benchmark_run_unittest.py
index ab86300..9d815b8 100755
--- a/crosperf/benchmark_run_unittest.py
+++ b/crosperf/benchmark_run_unittest.py
@@ -61,7 +61,7 @@
cache_only=False,
log_level='average',
compiler='gcc',
- skylab=False)
+ crosfleet=False)
self.test_cache_conditions = [
CacheConditions.CACHE_FILE_EXISTS, CacheConditions.CHECKSUMS_MATCH
@@ -86,7 +86,7 @@
cache_only=False,
log_level='average',
compiler='gcc',
- skylab=False)
+ crosfleet=False)
logging_level = 'average'
m = MockMachineManager('/tmp/chromeos_root', 0, logging_level, '')
@@ -133,10 +133,11 @@
pass
def test_run(self):
- br = benchmark_run.BenchmarkRun(
- 'test_run', self.test_benchmark, self.test_label, 1,
- self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ br = benchmark_run.BenchmarkRun('test_run', self.test_benchmark,
+ self.test_label, 1,
+ self.test_cache_conditions,
+ self.mock_machine_manager, self.mock_logger,
+ 'average', '', {})
def MockLogOutput(msg, print_to_console=False):
"""Helper function for test_run."""
@@ -273,10 +274,11 @@
self.assertEqual(self.status, ['FAILED'])
def test_terminate_pass(self):
- br = benchmark_run.BenchmarkRun(
- 'test_run', self.test_benchmark, self.test_label, 1,
- self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ br = benchmark_run.BenchmarkRun('test_run', self.test_benchmark,
+ self.test_label, 1,
+ self.test_cache_conditions,
+ self.mock_machine_manager, self.mock_logger,
+ 'average', '', {})
def GetLastEventPassed():
"""Helper function for test_terminate_pass"""
@@ -300,10 +302,11 @@
self.assertEqual(self.status, benchmark_run.STATUS_FAILED)
def test_terminate_fail(self):
- br = benchmark_run.BenchmarkRun(
- 'test_run', self.test_benchmark, self.test_label, 1,
- self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ br = benchmark_run.BenchmarkRun('test_run', self.test_benchmark,
+ self.test_label, 1,
+ self.test_cache_conditions,
+ self.mock_machine_manager, self.mock_logger,
+ 'average', '', {})
def GetLastEventFailed():
"""Helper function for test_terminate_fail"""
@@ -327,10 +330,11 @@
self.assertEqual(self.status, benchmark_run.STATUS_SUCCEEDED)
def test_acquire_machine(self):
- br = benchmark_run.BenchmarkRun(
- 'test_run', self.test_benchmark, self.test_label, 1,
- self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ br = benchmark_run.BenchmarkRun('test_run', self.test_benchmark,
+ self.test_label, 1,
+ self.test_cache_conditions,
+ self.mock_machine_manager, self.mock_logger,
+ 'average', '', {})
br.terminated = True
self.assertRaises(Exception, br.AcquireMachine)
@@ -344,10 +348,11 @@
self.assertEqual(machine.name, 'chromeos1-row3-rack5-host7.cros')
def test_get_extra_autotest_args(self):
- br = benchmark_run.BenchmarkRun(
- 'test_run', self.test_benchmark, self.test_label, 1,
- self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ br = benchmark_run.BenchmarkRun('test_run', self.test_benchmark,
+ self.test_label, 1,
+ self.test_cache_conditions,
+ self.mock_machine_manager, self.mock_logger,
+ 'average', '', {})
def MockLogError(err_msg):
"""Helper function for test_get_extra_autotest_args"""
@@ -379,10 +384,11 @@
@mock.patch.object(SuiteRunner, 'Run')
@mock.patch.object(Result, 'CreateFromRun')
def test_run_test(self, mock_result, mock_runner):
- br = benchmark_run.BenchmarkRun(
- 'test_run', self.test_benchmark, self.test_label, 1,
- self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ br = benchmark_run.BenchmarkRun('test_run', self.test_benchmark,
+ self.test_label, 1,
+ self.test_cache_conditions,
+ self.mock_machine_manager, self.mock_logger,
+ 'average', '', {})
self.status = []
@@ -409,15 +415,17 @@
br.profiler_args)
self.assertEqual(mock_result.call_count, 1)
- mock_result.assert_called_with(
- self.mock_logger, 'average', self.test_label, None, "{'Score':100}", '',
- 0, 'page_cycler.netsim.top_10', 'telemetry_Crosperf', '')
+ mock_result.assert_called_with(self.mock_logger, 'average', self.test_label,
+ None, "{'Score':100}", '', 0,
+ 'page_cycler.netsim.top_10',
+ 'telemetry_Crosperf', '')
def test_set_cache_conditions(self):
- br = benchmark_run.BenchmarkRun(
- 'test_run', self.test_benchmark, self.test_label, 1,
- self.test_cache_conditions, self.mock_machine_manager, self.mock_logger,
- 'average', '', {})
+ br = benchmark_run.BenchmarkRun('test_run', self.test_benchmark,
+ self.test_label, 1,
+ self.test_cache_conditions,
+ self.mock_machine_manager, self.mock_logger,
+ 'average', '', {})
phony_cache_conditions = [123, 456, True, False]
diff --git a/crosperf/crosperf_autolock.py b/crosperf/crosperf_autolock.py
new file mode 100755
index 0000000..b593fa9
--- /dev/null
+++ b/crosperf/crosperf_autolock.py
@@ -0,0 +1,281 @@
+#!/usr/bin/env python3
+
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper script to automatically lock devices for crosperf."""
+
+import os
+import sys
+import argparse
+import subprocess
+import contextlib
+import json
+from typing import Optional, Any
+import dataclasses
+
+# Have to do sys.path hackery because crosperf relies on PYTHONPATH
+# modifications.
+PARENT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.append(PARENT_DIR)
+
+
+def main(sys_args: list[str]) -> Optional[str]:
+ """Run crosperf_autolock. Returns error msg or None"""
+ args, leftover_args = parse_args(sys_args)
+ fleet_params = [
+ CrosfleetParams(board=args.board,
+ pool=args.pool,
+ lease_time=args.lease_time)
+ for _ in range(args.num_leases)
+ ]
+ if not fleet_params:
+ return ('No board names identified. If you want to use'
+ ' a known host, just use crosperf directly.')
+ try:
+ _run_crosperf(fleet_params, args.dut_lock_timeout, leftover_args)
+ except BoardLockError as e:
+ _eprint('ERROR:', e)
+ _eprint('May need to login to crosfleet? Run "crosfleet login"')
+ _eprint('The leases may also be successful later on. '
+ 'Check with "crosfleet dut leases"')
+ return 'crosperf_autolock failed'
+ except BoardReleaseError as e:
+ _eprint('ERROR:', e)
+ _eprint('May need to re-run "crosfleet dut abandon"')
+ return 'crosperf_autolock failed'
+ return None
+
+
+def parse_args(args: list[str]) -> tuple[Any, list]:
+ """Parse the CLI arguments."""
+ parser = argparse.ArgumentParser(
+ 'crosperf_autolock',
+ description='Wrapper around crosperf'
+ ' to autolock DUTs from crosfleet.',
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+ parser.add_argument('--board',
+ type=str,
+ help='Space or comma separated list of boards to lock',
+ required=True,
+ default=argparse.SUPPRESS)
+ parser.add_argument('--num-leases',
+ type=int,
+ help='Number of boards to lock.',
+ metavar='NUM',
+ default=1)
+ parser.add_argument('--pool',
+ type=str,
+ help='Pool to pull from.',
+ default='DUT_POOL_QUOTA')
+ parser.add_argument('--dut-lock-timeout',
+ type=float,
+ metavar='SEC',
+ help='Number of seconds we want to try to lease a board'
+ ' from crosfleet. This option does NOT change the'
+ ' lease length.',
+ default=600)
+ parser.add_argument('--lease-time',
+ type=int,
+ metavar='MIN',
+ help='Number of minutes to lock the board. Max is 1440.',
+ default=1440)
+ parser.epilog = (
+ 'For more detailed flags, you have to read the args taken by the'
+ ' crosperf executable. Args are passed transparently to crosperf.')
+ return parser.parse_known_args(args)
+
+
+class BoardLockError(Exception):
+ """Error to indicate failure to lock a board."""
+
+ def __init__(self, msg: str):
+ self.msg = 'BoardLockError: ' + msg
+ super().__init__(self.msg)
+
+
+class BoardReleaseError(Exception):
+ """Error to indicate failure to release a board."""
+
+ def __init__(self, msg: str):
+ self.msg = 'BoardReleaseError: ' + msg
+ super().__init__(self.msg)
+
+
[email protected](frozen=True)
+class CrosfleetParams:
+ """Dataclass to hold all crosfleet parameterizations."""
+ board: str
+ pool: str
+ lease_time: int
+
+
+def _eprint(*msg, **kwargs):
+ print(*msg, file=sys.stderr, **kwargs)
+
+
+def _run_crosperf(crosfleet_params: list[CrosfleetParams], lock_timeout: float,
+ leftover_args: list[str]):
+ """Autolock devices and run crosperf with leftover arguments.
+
+ Raises:
+ BoardLockError: When board was unable to be locked.
+ BoardReleaseError: When board was unable to be released.
+ """
+ if not crosfleet_params:
+ raise ValueError('No crosfleet params given; cannot call crosfleet.')
+
+ # We'll assume all the boards are the same type, which seems to be the case
+ # in experiments that actually get used.
+ passed_board_arg = crosfleet_params[0].board
+ with contextlib.ExitStack() as stack:
+ dut_hostnames = []
+ for param in crosfleet_params:
+ print(
+ f'Sent lock request for {param.board} for {param.lease_time} minutes'
+ '\nIf this fails, you may need to run "crosfleet dut abandon <...>"')
+ # May raise BoardLockError, abandoning previous DUTs.
+ dut_hostname = stack.enter_context(
+ crosfleet_machine_ctx(
+ param.board,
+ param.lease_time,
+ lock_timeout,
+ {'label-pool': param.pool},
+ ))
+ if dut_hostname:
+ print(f'Locked {param.board} machine: {dut_hostname}')
+ dut_hostnames.append(dut_hostname)
+
+ # We import crosperf late, because this import is extremely slow.
+ # We don't want the user to wait several seconds just to get
+ # help info.
+ import crosperf
+ for dut_hostname in dut_hostnames:
+ crosperf.Main([
+ sys.argv[0],
+ '--no_lock',
+ 'True',
+ '--remote',
+ dut_hostname,
+ '--board',
+ passed_board_arg,
+ ] + leftover_args)
+
+
[email protected]
+def crosfleet_machine_ctx(board: str,
+ lease_minutes: int,
+ lock_timeout: float,
+ dims: dict[str, Any],
+ abandon_timeout: float = 120.0) -> Any:
+ """Acquire dut from crosfleet, and release once it leaves the context.
+
+ Args:
+ board: Board type to lease.
+ lease_minutes: Length of lease, in minutes.
+    lock_timeout: How long to wait for a lock before giving up.
+    dims: Dictionary of dimension arguments to pass to crosfleet's '-dims'
+      flag.
+    abandon_timeout (optional): How long to wait for the release before
+      giving up.
+
+ Yields:
+ A string representing the crosfleet DUT hostname.
+
+ Raises:
+ BoardLockError: When board was unable to be locked.
+ BoardReleaseError: When board was unable to be released.
+ """
+ # This lock may raise an exception, but if it does, we can't release
+  # the DUT anyway, as we won't have the dut_hostname.
+ dut_hostname = crosfleet_autolock(board, lease_minutes, dims, lock_timeout)
+ try:
+ yield dut_hostname
+ finally:
+ if dut_hostname:
+ crosfleet_release(dut_hostname, abandon_timeout)
+
+
+def crosfleet_autolock(board: str, lease_minutes: int, dims: dict[str, Any],
+ timeout_sec: float) -> str:
+ """Lock a device using crosfleet, paramaterized by the board type.
+
+ Args:
+ board: Board of the DUT we want to lock.
+ lease_minutes: Number of minutes we're trying to lease the DUT for.
+    dims: Dictionary of dimension arguments to pass to crosfleet's '-dims'
+      flag.
+    timeout_sec: Number of seconds to try to lease the DUT.
+
+ Returns:
+ The hostname of the board, or empty string if it couldn't be parsed.
+
+ Raises:
+ BoardLockError: When board was unable to be locked.
+ """
+ crosfleet_cmd_args = [
+ 'crosfleet',
+ 'dut',
+ 'lease',
+ '-json',
+ '-reason="crosperf autolock"',
+ f'-board={board}',
+ f'-minutes={lease_minutes}',
+ ]
+ if dims:
+ dims_arg = ','.join('{}={}'.format(k, v) for k, v in dims.items())
+ crosfleet_cmd_args.extend(['-dims', f'{dims_arg}'])
+
+ try:
+ output = subprocess.check_output(crosfleet_cmd_args,
+ timeout=timeout_sec,
+ encoding='utf-8')
+ except subprocess.CalledProcessError as e:
+ raise BoardLockError(
+ f'crosfleet dut lease failed with exit code: {e.returncode}')
+ except subprocess.TimeoutExpired as e:
+ raise BoardLockError(f'crosfleet dut lease timed out after {timeout_sec}s;'
+ ' please abandon the dut manually.')
+
+ try:
+ json_obj = json.loads(output)
+ dut_hostname = json_obj['DUT']['Hostname']
+ if not isinstance(dut_hostname, str):
+ raise TypeError('dut_hostname was not a string')
+ except (json.JSONDecodeError, IndexError, KeyError, TypeError) as e:
+ raise BoardLockError(
+ f'crosfleet dut lease output was parsed incorrectly: {e!r};'
+ f' observed output was {output}')
+ return _maybe_append_suffix(dut_hostname)
+
+
+def crosfleet_release(dut_hostname: str, timeout_sec: float = 120.0):
+ """Release a crosfleet device.
+
+  Consider using the context-managed crosfleet_machine_ctx instead.
+
+ Args:
+ dut_hostname: Name of the device we want to release.
+ timeout_sec: Number of seconds to try to release the DUT. Default is 120s.
+
+ Raises:
+ BoardReleaseError: Potentially failed to abandon the lease.
+ """
+ crosfleet_cmd_args = [
+ 'crosfleet',
+ 'dut',
+ 'abandon',
+ dut_hostname,
+ ]
+ exit_code = subprocess.call(crosfleet_cmd_args, timeout=timeout_sec)
+ if exit_code != 0:
+ raise BoardReleaseError(
+ f'"crosfleet dut abandon" had exit code {exit_code}')
+
+
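+# Hostnames in crosperf/default_remotes end in '.cros'; add it if missing.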
+def _maybe_append_suffix(hostname: str) -> str:
+ if hostname.endswith('.cros') or '.cros.' in hostname:
+ return hostname
+ return hostname + '.cros'
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
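A minimal sketch of driving the lease logic directly (assuming the crosfleet CLI is installed and logged in; the board name, pool, and sys.path tweak are illustrative placeholders):

import sys

sys.path.append('crosperf')  # Assumes we run from the toolchain-utils root.
from crosperf_autolock import BoardLockError, crosfleet_machine_ctx

try:
  # Lease one coral DUT for 30 minutes, waiting up to 10 minutes for the lease.
  with crosfleet_machine_ctx('coral', 30, 600.0,
                             {'label-pool': 'DUT_POOL_QUOTA'}) as dut:
    print(f'Leased {dut}; pass it to crosperf via --remote {dut}')
except BoardLockError as e:
  print(e, file=sys.stderr)

The wrapper itself is invoked roughly as "crosperf/crosperf_autolock.py --board=coral --lease-time=30 <experiment file>", with any remaining arguments passed through to crosperf.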
diff --git a/crosperf/crosperf_unittest.py b/crosperf/crosperf_unittest.py
index 9c7d52a..774159f 100755
--- a/crosperf/crosperf_unittest.py
+++ b/crosperf/crosperf_unittest.py
@@ -55,20 +55,19 @@
def testConvertOptionsToSettings(self):
parser = argparse.ArgumentParser()
- parser.add_argument(
- '-l',
- '--log_dir',
- dest='log_dir',
- default='',
- help='The log_dir, default is under '
- '<crosperf_logs>/logs')
+ parser.add_argument('-l',
+ '--log_dir',
+ dest='log_dir',
+ default='',
+ help='The log_dir, default is under '
+ '<crosperf_logs>/logs')
crosperf.SetupParserOptions(parser)
argv = ['crosperf/crosperf.py', 'temp.exp', '--rerun=True']
options, _ = parser.parse_known_args(argv)
settings = crosperf.ConvertOptionsToSettings(options)
self.assertIsNotNone(settings)
self.assertIsInstance(settings, settings_factory.GlobalSettings)
- self.assertEqual(len(settings.fields), 39)
+ self.assertEqual(len(settings.fields), 40)
self.assertTrue(settings.GetField('rerun'))
argv = ['crosperf/crosperf.py', 'temp.exp']
options, _ = parser.parse_known_args(argv)
diff --git a/crosperf/default_remotes b/crosperf/default_remotes
index f23fe21..faecb83 100644
--- a/crosperf/default_remotes
+++ b/crosperf/default_remotes
@@ -1,9 +1,8 @@
-bob : chromeos2-row10-rack9-host1.cros chromeos2-row10-rack9-host3.cros
-coral : chromeos2-row9-rack9-host9.cros chromeos2-row9-rack9-host11.cros chromeos2-row9-rack9-host13.cros
-elm : chromeos2-row10-rack9-host19.cros chromeos2-row10-rack9-host21.cros
-chell : chromeos2-row9-rack9-host1.cros chromeos2-row9-rack9-host3.cros
-kefka : chromeos2-row9-rack9-host21.cros chromeos2-row10-rack9-host13.cros
-lulu : chromeos2-row9-rack9-host5.cros chromeos2-row9-rack9-host7.cros
-nautilus : chromeos2-row10-rack9-host9.cros chromeos2-row10-rack9-host11.cros
-snappy : chromeos2-row10-rack9-host5.cros chromeos2-row10-rack9-host7.cros
-veyron_tiger : chromeos2-row9-rack9-host17.cros chromeos2-row9-rack9-host19.cros
+bob : chromeos6-row4-rack13-host6.cros
+chell : chromeos2-row1-rack10-host2.cros chromeos2-row1-rack10-host4.cros
+coral : chromeos6-row5-rack6-host1.cros chromeos6-row5-rack6-host3.cros chromeos6-row5-rack6-host5.cros
+elm : chromeos6-row14-rack15-host21.cros
+kefka : chromeos6-row6-rack22-host2.cros chromeos6-row6-rack22-host3.cros chromeos6-row11-rack22-host7.cros
+nautilus : chromeos6-row5-rack10-host1.cros chromeos6-row5-rack10-host3.cros
+snappy : chromeos6-row3-rack20-host1.cros chromeos6-row3-rack20-host3.cros
+veyron_tiger : chromeos6-row3-rack7-host1.cros
diff --git a/crosperf/download_images.py b/crosperf/download_images.py
index 9bd4a8b..8e1bad1 100644
--- a/crosperf/download_images.py
+++ b/crosperf/download_images.py
@@ -137,8 +137,8 @@
def DownloadSingleFile(self, chromeos_root, build_id, package_file_name):
# Verify if package files exist
status = 0
- gs_package_name = (
- 'gs://chromeos-image-archive/%s/%s' % (build_id, package_file_name))
+ gs_package_name = ('gs://chromeos-image-archive/%s/%s' %
+ (build_id, package_file_name))
gsutil_cmd = os.path.join(chromeos_root, GS_UTIL)
if not test_flag.GetTestMode():
cmd = '%s ls %s' % (gsutil_cmd, gs_package_name)
@@ -171,8 +171,8 @@
uncompress_cmd):
# Uncompress file
download_path = os.path.join(chromeos_root, 'chroot/tmp', build_id)
- command = (
- 'cd %s ; %s %s' % (download_path, uncompress_cmd, package_file_name))
+ command = ('cd %s ; %s %s' %
+ (download_path, uncompress_cmd, package_file_name))
if self.log_level != 'verbose':
self._logger.LogOutput('CMD: %s' % command)
@@ -193,8 +193,8 @@
def VerifyFileExists(self, chromeos_root, build_id, package_file):
# Quickly verify if the files are there
status = 0
- gs_package_name = (
- 'gs://chromeos-image-archive/%s/%s' % (build_id, package_file))
+ gs_package_name = ('gs://chromeos-image-archive/%s/%s' %
+ (build_id, package_file))
gsutil_cmd = os.path.join(chromeos_root, GS_UTIL)
if not test_flag.GetTestMode():
cmd = '%s ls %s' % (gsutil_cmd, gs_package_name)
@@ -227,9 +227,9 @@
autotest_packages_name)
if status != 0:
default_autotest_dir = '/mnt/host/source/src/third_party/autotest/files'
- print(
- '(Warning: Could not find autotest packages .)\n'
- '(Warning: Defaulting autotest path to %s .' % default_autotest_dir)
+ print('(Warning: Could not find autotest packages .)\n'
+ '(Warning: Defaulting autotest path to %s .' %
+ default_autotest_dir)
return default_autotest_dir
# Files exist on server, download and uncompress them
@@ -281,22 +281,26 @@
self.UncompressSingleFile(chromeos_root, build_id, debug_archive_name,
'tar -xf ')
- # Rename created autotest directory to autotest_files
- command = ('cd %s ; mv debug debug_files' % download_path)
+ # Extract and move debug files into the proper location.
+ debug_dir = 'debug_files/usr/lib'
+ command = ('cd %s ; mkdir -p %s; mv debug %s' %
+ (download_path, debug_dir, debug_dir))
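+    # The debug symbols end up under debug_files/usr/lib/debug.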
if self.log_level != 'verbose':
self._logger.LogOutput('CMD: %s' % command)
- print('(Moving downloaded debug files to debug_files)')
+ print('Moving downloaded debug files to %s' % debug_dir)
retval = self._ce.RunCommand(command)
if retval != 0:
- raise MissingFile('Could not create directory debug_files')
+ raise MissingFile('Could not create directory %s' %
+ os.path.join(debug_dir, 'debug'))
return debug_rel_path
def Run(self, chromeos_root, xbuddy_label, autotest_path, debug_path,
download_debug):
build_id = self.GetBuildID(chromeos_root, xbuddy_label)
- image_name = ('gs://chromeos-image-archive/%s/chromiumos_test_image.tar.xz'
- % build_id)
+ image_name = (
+ 'gs://chromeos-image-archive/%s/chromiumos_test_image.tar.xz' %
+ build_id)
# Verify that image exists for build_id, before attempting to
# download it.
diff --git a/crosperf/experiment.py b/crosperf/experiment.py
index 6e2efd4..e919f6e 100644
--- a/crosperf/experiment.py
+++ b/crosperf/experiment.py
@@ -29,7 +29,7 @@
cache_conditions, labels, benchmarks, experiment_file, email_to,
acquire_timeout, log_dir, log_level, share_cache,
results_directory, compress_results, locks_directory, cwp_dso,
- ignore_min_max, skylab, dut_config):
+ ignore_min_max, crosfleet, dut_config, no_lock: bool):
self.name = name
self.working_directory = working_directory
self.remote = remote
@@ -56,14 +56,15 @@
self.lock_mgr = None
self.cwp_dso = cwp_dso
self.ignore_min_max = ignore_min_max
- self.skylab = skylab
+ self.crosfleet = crosfleet
+ self.no_lock = no_lock
self.l = logger.GetLogger(log_dir)
if not self.benchmarks:
raise RuntimeError('No benchmarks specified')
if not self.labels:
raise RuntimeError('No labels specified')
- if not remote and not self.skylab:
+ if not remote and not self.crosfleet:
raise RuntimeError('No remote hosts specified')
# We need one chromeos_root to run the benchmarks in, but it doesn't
@@ -123,10 +124,11 @@
logger_to_use = logger.Logger(self.log_dir, 'run.%s' % (full_name),
True)
benchmark_runs.append(
- benchmark_run.BenchmarkRun(
- benchmark_run_name, benchmark, label, iteration,
- self.cache_conditions, self.machine_manager, logger_to_use,
- self.log_level, self.share_cache, dut_config))
+ benchmark_run.BenchmarkRun(benchmark_run_name, benchmark, label,
+ iteration, self.cache_conditions,
+ self.machine_manager, logger_to_use,
+ self.log_level, self.share_cache,
+ dut_config))
return benchmark_runs
@@ -223,6 +225,6 @@
m for m in self.locked_machines if m not in unlocked_machines
]
if failed_machines:
- raise RuntimeError(
- 'These machines are not unlocked correctly: %s' % failed_machines)
+ raise RuntimeError('These machines are not unlocked correctly: %s' %
+ failed_machines)
self.lock_mgr = None
diff --git a/crosperf/experiment_factory.py b/crosperf/experiment_factory.py
index 332f035..a9594a2 100644
--- a/crosperf/experiment_factory.py
+++ b/crosperf/experiment_factory.py
@@ -101,12 +101,14 @@
def AppendBenchmarkSet(self, benchmarks, benchmark_list, test_args,
iterations, rm_chroot_tmp, perf_args, suite,
- show_all_results, retries, run_local, cwp_dso, weight):
+ show_all_results, retries, run_local, cwp_dso,
+ weight):
"""Add all the tests in a set to the benchmarks list."""
for test_name in benchmark_list:
- telemetry_benchmark = Benchmark(
- test_name, test_name, test_args, iterations, rm_chroot_tmp, perf_args,
- suite, show_all_results, retries, run_local, cwp_dso, weight)
+ telemetry_benchmark = Benchmark(test_name, test_name, test_args,
+ iterations, rm_chroot_tmp, perf_args,
+ suite, show_all_results, retries,
+ run_local, cwp_dso, weight)
benchmarks.append(telemetry_benchmark)
def GetExperiment(self, experiment_file, working_directory, log_dir):
@@ -119,9 +121,10 @@
if log_level not in ('quiet', 'average', 'verbose'):
log_level = 'verbose'
- skylab = global_settings.GetField('skylab')
- # Check whether skylab tool is installed correctly for skylab mode.
- if skylab and not self.CheckSkylabTool(chromeos_root, log_level):
+ crosfleet = global_settings.GetField('crosfleet')
+ no_lock = bool(global_settings.GetField('no_lock'))
+ # Check whether crosfleet tool is installed correctly for crosfleet mode.
+ if crosfleet and not self.CheckCrosfleetTool(chromeos_root, log_level):
sys.exit(0)
remote = global_settings.GetField('remote')
@@ -256,33 +259,33 @@
if suite == 'telemetry_Crosperf':
if test_name == 'all_perfv2':
- self.AppendBenchmarkSet(benchmarks, telemetry_perfv2_tests, test_args,
- iterations, rm_chroot_tmp, perf_args, suite,
- show_all_results, retries, run_local, cwp_dso,
- weight)
+ self.AppendBenchmarkSet(benchmarks, telemetry_perfv2_tests,
+ test_args, iterations, rm_chroot_tmp,
+ perf_args, suite, show_all_results, retries,
+ run_local, cwp_dso, weight)
elif test_name == 'all_pagecyclers':
self.AppendBenchmarkSet(benchmarks, telemetry_pagecycler_tests,
test_args, iterations, rm_chroot_tmp,
perf_args, suite, show_all_results, retries,
run_local, cwp_dso, weight)
elif test_name == 'all_crosbolt_perf':
- self.AppendBenchmarkSet(
- benchmarks, telemetry_crosbolt_perf_tests, test_args, iterations,
- rm_chroot_tmp, perf_args, 'telemetry_Crosperf', show_all_results,
- retries, run_local, cwp_dso, weight)
- self.AppendBenchmarkSet(
- benchmarks,
- crosbolt_perf_tests,
- '',
- iterations,
- rm_chroot_tmp,
- perf_args,
- '',
- show_all_results,
- retries,
- run_local=False,
- cwp_dso=cwp_dso,
- weight=weight)
+ self.AppendBenchmarkSet(benchmarks, telemetry_crosbolt_perf_tests,
+ test_args, iterations, rm_chroot_tmp,
+ perf_args, 'telemetry_Crosperf',
+ show_all_results, retries, run_local,
+ cwp_dso, weight)
+ self.AppendBenchmarkSet(benchmarks,
+ crosbolt_perf_tests,
+ '',
+ iterations,
+ rm_chroot_tmp,
+ perf_args,
+ '',
+ show_all_results,
+ retries,
+ run_local=False,
+ cwp_dso=cwp_dso,
+ weight=weight)
elif test_name == 'all_toolchain_perf':
self.AppendBenchmarkSet(benchmarks, telemetry_toolchain_perf_tests,
test_args, iterations, rm_chroot_tmp,
@@ -321,10 +324,11 @@
# cwp_dso=cwp_dso,
# weight=weight))
elif test_name == 'all_toolchain_perf_old':
- self.AppendBenchmarkSet(
- benchmarks, telemetry_toolchain_old_perf_tests, test_args,
- iterations, rm_chroot_tmp, perf_args, suite, show_all_results,
- retries, run_local, cwp_dso, weight)
+ self.AppendBenchmarkSet(benchmarks,
+ telemetry_toolchain_old_perf_tests,
+ test_args, iterations, rm_chroot_tmp,
+ perf_args, suite, show_all_results, retries,
+ run_local, cwp_dso, weight)
else:
benchmark = Benchmark(benchmark_name, test_name, test_args,
iterations, rm_chroot_tmp, perf_args, suite,
@@ -333,34 +337,32 @@
benchmarks.append(benchmark)
else:
if test_name == 'all_graphics_perf':
- self.AppendBenchmarkSet(
- benchmarks,
- graphics_perf_tests,
- '',
- iterations,
- rm_chroot_tmp,
- perf_args,
- '',
- show_all_results,
- retries,
- run_local=False,
- cwp_dso=cwp_dso,
- weight=weight)
+ self.AppendBenchmarkSet(benchmarks,
+ graphics_perf_tests,
+ '',
+ iterations,
+ rm_chroot_tmp,
+ perf_args,
+ '',
+ show_all_results,
+ retries,
+ run_local=False,
+ cwp_dso=cwp_dso,
+ weight=weight)
else:
# Add the single benchmark.
- benchmark = Benchmark(
- benchmark_name,
- test_name,
- test_args,
- iterations,
- rm_chroot_tmp,
- perf_args,
- suite,
- show_all_results,
- retries,
- run_local=False,
- cwp_dso=cwp_dso,
- weight=weight)
+ benchmark = Benchmark(benchmark_name,
+ test_name,
+ test_args,
+ iterations,
+ rm_chroot_tmp,
+ perf_args,
+ suite,
+ show_all_results,
+ retries,
+ run_local=False,
+ cwp_dso=cwp_dso,
+ weight=weight)
benchmarks.append(benchmark)
if not benchmarks:
@@ -389,8 +391,9 @@
my_remote = new_remote
if image:
- if skylab:
- raise RuntimeError('In skylab mode, local image should not be used.')
+ if crosfleet:
+ raise RuntimeError(
+ 'In crosfleet mode, local image should not be used.')
if build:
raise RuntimeError('Image path and build are provided at the same '
'time, please use only one of them.')
@@ -406,8 +409,8 @@
# TODO(yunlian): We should consolidate code in machine_manager.py
# to derermine whether we are running from within google or not
- if ('corp.google.com' in socket.gethostname() and not my_remote and
- not skylab):
+ if ('corp.google.com' in socket.gethostname() and not my_remote
+ and not crosfleet):
my_remote = self.GetDefaultRemotes(board)
if global_settings.GetField('same_machine') and len(my_remote) > 1:
raise RuntimeError('Only one remote is allowed when same_machine '
@@ -418,12 +421,12 @@
# pylint: disable=too-many-function-args
label = MockLabel(label_name, build, image, autotest_path, debug_path,
chromeos_root, board, my_remote, image_args,
- cache_dir, cache_only, log_level, compiler, skylab,
- chrome_src)
+ cache_dir, cache_only, log_level, compiler,
+ crosfleet, chrome_src)
else:
label = Label(label_name, build, image, autotest_path, debug_path,
chromeos_root, board, my_remote, image_args, cache_dir,
- cache_only, log_level, compiler, skylab, chrome_src)
+ cache_only, log_level, compiler, crosfleet, chrome_src)
labels.append(label)
if not labels:
@@ -432,21 +435,36 @@
email = global_settings.GetField('email')
all_remote += list(set(my_remote))
all_remote = list(set(all_remote))
- if skylab:
+ if crosfleet:
for remote in all_remote:
- self.CheckRemotesInSkylab(remote)
- experiment = Experiment(experiment_name, all_remote, working_directory,
- chromeos_root, cache_conditions, labels, benchmarks,
- experiment_file.Canonicalize(), email,
- acquire_timeout, log_dir, log_level, share_cache,
- results_dir, compress_results, locks_dir, cwp_dso,
- ignore_min_max, skylab, dut_config)
+ self.CheckRemotesInCrosfleet(remote)
+ experiment = Experiment(experiment_name,
+ all_remote,
+ working_directory,
+ chromeos_root,
+ cache_conditions,
+ labels,
+ benchmarks,
+ experiment_file.Canonicalize(),
+ email,
+ acquire_timeout,
+ log_dir,
+ log_level,
+ share_cache,
+ results_dir,
+ compress_results,
+ locks_dir,
+ cwp_dso,
+ ignore_min_max,
+ crosfleet,
+ dut_config,
+ no_lock=no_lock)
return experiment
def GetDefaultRemotes(self, board):
- default_remotes_file = os.path.join(
- os.path.dirname(__file__), 'default_remotes')
+ default_remotes_file = os.path.join(os.path.dirname(__file__),
+ 'default_remotes')
try:
with open(default_remotes_file) as f:
for line in f:
@@ -464,26 +482,27 @@
else:
raise RuntimeError('There is no remote for {0}'.format(board))
- def CheckRemotesInSkylab(self, remote):
+ def CheckRemotesInCrosfleet(self, remote):
# TODO: (AI:zhizhouy) need to check whether a remote is a local or lab
    # machine. If it is not a lab machine, raise an error.
pass
- def CheckSkylabTool(self, chromeos_root, log_level):
- SKYLAB_PATH = '/usr/local/bin/skylab'
- if os.path.exists(SKYLAB_PATH):
+ def CheckCrosfleetTool(self, chromeos_root, log_level):
+ CROSFLEET_PATH = 'crosfleet'
+ if os.path.exists(CROSFLEET_PATH):
return True
l = logger.GetLogger()
- l.LogOutput('Skylab tool not installed, trying to install it.')
+ l.LogOutput('Crosfleet tool not installed, trying to install it.')
ce = command_executer.GetCommandExecuter(l, log_level=log_level)
- setup_lab_tools = os.path.join(chromeos_root, 'chromeos-admin', 'lab-tools',
- 'setup_lab_tools')
+ setup_lab_tools = os.path.join(chromeos_root, 'chromeos-admin',
+ 'lab-tools', 'setup_lab_tools')
cmd = '%s' % setup_lab_tools
status = ce.RunCommand(cmd)
if status != 0:
- raise RuntimeError('Skylab tool not installed correctly, please try to '
- 'manually install it from %s' % setup_lab_tools)
- l.LogOutput('Skylab is installed at %s, please login before first use. '
- 'Login by running "skylab login" and follow instructions.' %
- SKYLAB_PATH)
+ raise RuntimeError(
+ 'Crosfleet tool not installed correctly, please try to '
+ 'manually install it from %s' % setup_lab_tools)
+ l.LogOutput('Crosfleet is installed at %s, please login before first use. '
+ 'Login by running "crosfleet login" and follow instructions.' %
+ CROSFLEET_PATH)
return False
diff --git a/crosperf/experiment_factory_unittest.py b/crosperf/experiment_factory_unittest.py
index 3528eb1..9637c10 100755
--- a/crosperf/experiment_factory_unittest.py
+++ b/crosperf/experiment_factory_unittest.py
@@ -79,14 +79,14 @@
class ExperimentFactoryTest(unittest.TestCase):
"""Class for running experiment factory unittests."""
-
def setUp(self):
self.append_benchmark_call_args = []
def testLoadExperimentFile1(self):
experiment_file = ExperimentFile(io.StringIO(EXPERIMENT_FILE_1))
- exp = ExperimentFactory().GetExperiment(
- experiment_file, working_directory='', log_dir='')
+ exp = ExperimentFactory().GetExperiment(experiment_file,
+ working_directory='',
+ log_dir='')
self.assertEqual(exp.remote, ['chromeos-alex3'])
self.assertEqual(len(exp.benchmarks), 2)
@@ -104,8 +104,9 @@
def testLoadExperimentFile2CWP(self):
experiment_file = ExperimentFile(io.StringIO(EXPERIMENT_FILE_2))
- exp = ExperimentFactory().GetExperiment(
- experiment_file, working_directory='', log_dir='')
+ exp = ExperimentFactory().GetExperiment(experiment_file,
+ working_directory='',
+ log_dir='')
self.assertEqual(exp.cwp_dso, 'kallsyms')
self.assertEqual(len(exp.benchmarks), 2)
self.assertEqual(exp.benchmarks[0].weight, 0.8)
@@ -240,11 +241,12 @@
ef = ExperimentFactory()
bench_list = []
- ef.AppendBenchmarkSet(bench_list, experiment_factory.telemetry_perfv2_tests,
- '', 1, False, '', 'telemetry_Crosperf', False, 0,
- False, '', 0)
- self.assertEqual(
- len(bench_list), len(experiment_factory.telemetry_perfv2_tests))
+ ef.AppendBenchmarkSet(bench_list,
+ experiment_factory.telemetry_perfv2_tests, '', 1,
+ False, '', 'telemetry_Crosperf', False, 0, False, '',
+ 0)
+ self.assertEqual(len(bench_list),
+ len(experiment_factory.telemetry_perfv2_tests))
self.assertTrue(isinstance(bench_list[0], benchmark.Benchmark))
bench_list = []
@@ -252,17 +254,17 @@
experiment_factory.telemetry_pagecycler_tests, '', 1,
False, '', 'telemetry_Crosperf', False, 0, False, '',
0)
- self.assertEqual(
- len(bench_list), len(experiment_factory.telemetry_pagecycler_tests))
+ self.assertEqual(len(bench_list),
+ len(experiment_factory.telemetry_pagecycler_tests))
self.assertTrue(isinstance(bench_list[0], benchmark.Benchmark))
bench_list = []
ef.AppendBenchmarkSet(bench_list,
- experiment_factory.telemetry_toolchain_perf_tests, '',
- 1, False, '', 'telemetry_Crosperf', False, 0, False,
- '', 0)
- self.assertEqual(
- len(bench_list), len(experiment_factory.telemetry_toolchain_perf_tests))
+ experiment_factory.telemetry_toolchain_perf_tests,
+ '', 1, False, '', 'telemetry_Crosperf', False, 0,
+ False, '', 0)
+ self.assertEqual(len(bench_list),
+ len(experiment_factory.telemetry_toolchain_perf_tests))
self.assertTrue(isinstance(bench_list[0], benchmark.Benchmark))
@mock.patch.object(socket, 'gethostname')
@@ -370,7 +372,8 @@
global_settings.SetField('same_machine', 'true')
global_settings.SetField('same_specs', 'true')
- self.assertRaises(Exception, ef.GetExperiment, mock_experiment_file, '', '')
+ self.assertRaises(Exception, ef.GetExperiment, mock_experiment_file, '',
+ '')
label_settings.SetField('remote', '')
global_settings.SetField('remote', '123.45.67.89')
exp = ef.GetExperiment(mock_experiment_file, '', '')
@@ -399,46 +402,42 @@
def test_get_default_remotes(self):
board_list = [
- 'elm', 'bob', 'chell', 'kefka', 'lulu', 'nautilus', 'snappy',
+ 'bob', 'chell', 'coral', 'elm', 'kefka', 'nautilus', 'snappy',
'veyron_tiger'
]
ef = ExperimentFactory()
self.assertRaises(Exception, ef.GetDefaultRemotes, 'bad-board')
- # Verify that we have entries for every board, and that we get at least
- # two machines for each board.
+ # Verify that we have entries for every board
for b in board_list:
remotes = ef.GetDefaultRemotes(b)
- if b == 'daisy':
- self.assertEqual(len(remotes), 1)
- else:
- self.assertGreaterEqual(len(remotes), 2)
+ self.assertGreaterEqual(len(remotes), 1)
@mock.patch.object(command_executer.CommandExecuter, 'RunCommand')
@mock.patch.object(os.path, 'exists')
- def test_check_skylab_tool(self, mock_exists, mock_runcmd):
+ def test_check_crosfleet_tool(self, mock_exists, mock_runcmd):
ef = ExperimentFactory()
chromeos_root = '/tmp/chromeos'
log_level = 'average'
mock_exists.return_value = True
- ret = ef.CheckSkylabTool(chromeos_root, log_level)
+ ret = ef.CheckCrosfleetTool(chromeos_root, log_level)
self.assertTrue(ret)
mock_exists.return_value = False
mock_runcmd.return_value = 1
with self.assertRaises(RuntimeError) as err:
- ef.CheckSkylabTool(chromeos_root, log_level)
+ ef.CheckCrosfleetTool(chromeos_root, log_level)
self.assertEqual(mock_runcmd.call_count, 1)
self.assertEqual(
- str(err.exception), 'Skylab tool not installed '
+ str(err.exception), 'Crosfleet tool not installed '
'correctly, please try to manually install it from '
'/tmp/chromeos/chromeos-admin/lab-tools/setup_lab_tools')
mock_runcmd.return_value = 0
mock_runcmd.call_count = 0
- ret = ef.CheckSkylabTool(chromeos_root, log_level)
+ ret = ef.CheckCrosfleetTool(chromeos_root, log_level)
self.assertEqual(mock_runcmd.call_count, 1)
self.assertFalse(ret)
diff --git a/crosperf/experiment_runner.py b/crosperf/experiment_runner.py
index 8ba85a4..6daef78 100644
--- a/crosperf/experiment_runner.py
+++ b/crosperf/experiment_runner.py
@@ -107,15 +107,15 @@
"""Get where is the machine from.
Returns:
- The location of the machine: local or skylab
+ The location of the machine: local or crosfleet
"""
    # We assume that lab machines always start with chromeos*, and local
    # machines are IP addresses.
if 'chromeos' in machine:
- if lock_mgr.CheckMachineInSkylab(machine):
- return 'skylab'
+ if lock_mgr.CheckMachineInCrosfleet(machine):
+ return 'crosfleet'
else:
- raise RuntimeError('Lab machine not in Skylab.')
+ raise RuntimeError('Lab machine not in Crosfleet.')
return 'local'
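
# A minimal illustrative sketch of the decision above: the machine type is
# derived from the hostname alone, and lab ('chromeos*') names must also be
# known to crosfleet. The hostnames here are made up for illustration.
for m in ('chromeos2-row3-rack7-host1', '172.17.128.241'):
  kind = 'crosfleet' if 'chromeos' in m else 'local'
  # -> 'crosfleet' for the lab hostname (when the crosfleet check passes),
  #    'local' for the bare IP address.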
def _LockAllMachines(self, experiment):
@@ -125,7 +125,7 @@
in three different modes automatically, to prevent any other crosperf runs
from being able to update/use the machines while this experiment is
running:
- - Skylab machines: Use skylab lease-dut mechanism to lease
+ - Crosfleet machines: Use crosfleet lease-dut mechanism to lease
- Local machines: Use file lock mechanism to lock
"""
if test_flag.GetTestMode():
@@ -143,8 +143,8 @@
machine_type = self._GetMachineType(experiment.lock_mgr, m)
if machine_type == 'local':
experiment.lock_mgr.AddMachineToLocal(m)
- elif machine_type == 'skylab':
- experiment.lock_mgr.AddMachineToSkylab(m)
+ elif machine_type == 'crosfleet':
+ experiment.lock_mgr.AddMachineToCrosfleet(m)
machine_states = experiment.lock_mgr.GetMachineStates('lock')
experiment.lock_mgr.CheckMachineLocks(machine_states, 'lock')
self.locked_machines = experiment.lock_mgr.UpdateMachines(True)
@@ -160,8 +160,8 @@
cache.Init(br.label.chromeos_image, br.label.chromeos_root,
br.benchmark.test_name, br.iteration, br.test_args,
br.profiler_args, br.machine_manager, br.machine,
- br.label.board, br.cache_conditions, br.logger(), br.log_level,
- br.label, br.share_cache, br.benchmark.suite,
+ br.label.board, br.cache_conditions, br.logger(),
+ br.log_level, br.label, br.share_cache, br.benchmark.suite,
br.benchmark.show_all_results, br.benchmark.run_local,
br.benchmark.cwp_dso)
cache_dir = cache.GetCacheDirForWrite()
@@ -171,12 +171,12 @@
def _Run(self, experiment):
try:
- # We should not lease machines if tests are launched via `skylab
- # create-test`. This is because leasing DUT in skylab will create a
+ # We should not lease machines if tests are launched via `crosfleet
+ # create-test`. This is because leasing DUT in crosfleet will create a
      # no-op task on the DUT, and the newly created test will hang there.
# TODO(zhizhouy): Need to check whether machine is ready or not before
# assigning a test to it.
- if not experiment.skylab:
+ if not experiment.no_lock and not experiment.crosfleet:
self._LockAllMachines(experiment)
      # Calculate all checksums of available/locked machines, to ensure same
# label has same machines for testing
@@ -236,8 +236,8 @@
if not benchmark_run.cache_hit:
send_mail = True
break
- if (not send_mail and not experiment.email_to or
- config.GetConfig('no_email')):
+ if (not send_mail and not experiment.email_to
+ or config.GetConfig('no_email')):
return
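
# A small aside on the reflowed condition above: Python's 'and' binds tighter
# than 'or', so the test still groups as
#   (not send_mail and not experiment.email_to) or config.GetConfig('no_email')
# i.e. skip the email when nothing missed the cache and no recipients were
# given, or when the no_email config flag is set. The variable names below are
# stand-ins for illustration only.
send_mail, email_to, no_email = False, [], False
skip = (not send_mail and not email_to) or no_email  # same grouping as above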
label_names = []
@@ -245,7 +245,8 @@
label_names.append(label.name)
subject = '%s: %s' % (experiment.name, ' vs. '.join(label_names))
- text_report = TextResultsReport.FromExperiment(experiment, True).GetReport()
+ text_report = TextResultsReport.FromExperiment(experiment,
+ True).GetReport()
text_report += ('\nResults are stored in %s.\n' %
experiment.results_directory)
text_report = "<pre style='font-size: 13px'>%s</pre>" % text_report
@@ -253,12 +254,11 @@
attachment = EmailSender.Attachment('report.html', html_report)
email_to = experiment.email_to or []
email_to.append(getpass.getuser())
- EmailSender().SendEmail(
- email_to,
- subject,
- text_report,
- attachments=[attachment],
- msg_type='html')
+ EmailSender().SendEmail(email_to,
+ subject,
+ text_report,
+ attachments=[attachment],
+ msg_type='html')
def _StoreResults(self, experiment):
if self._terminated:
@@ -300,9 +300,10 @@
self.l.LogOutput('Storing results of each benchmark run.')
for benchmark_run in experiment.benchmark_runs:
if benchmark_run.result:
- benchmark_run_name = ''.join(
- ch for ch in benchmark_run.name if ch.isalnum())
- benchmark_run_path = os.path.join(results_directory, benchmark_run_name)
+ benchmark_run_name = ''.join(ch for ch in benchmark_run.name
+ if ch.isalnum())
+ benchmark_run_path = os.path.join(results_directory,
+ benchmark_run_name)
if experiment.compress_results:
benchmark_run.result.CompressResultsTo(benchmark_run_path)
else:
@@ -313,15 +314,16 @@
results_table_path = os.path.join(results_directory, 'results.html')
report = HTMLResultsReport.FromExperiment(experiment).GetReport()
if self.json_report:
- json_report = JSONResultsReport.FromExperiment(
- experiment, json_args={'indent': 2})
+ json_report = JSONResultsReport.FromExperiment(experiment,
+ json_args={'indent': 2})
_WriteJSONReportToFile(experiment, results_directory, json_report)
FileUtils().WriteFile(results_table_path, report)
self.l.LogOutput('Storing email message body in %s.' % results_directory)
msg_file_path = os.path.join(results_directory, 'msg_body.html')
- text_report = TextResultsReport.FromExperiment(experiment, True).GetReport()
+ text_report = TextResultsReport.FromExperiment(experiment,
+ True).GetReport()
text_report += ('\nResults are stored in %s.\n' %
experiment.results_directory)
msg_body = "<pre style='font-size: 13px'>%s</pre>" % text_report
diff --git a/crosperf/label.py b/crosperf/label.py
index a55d663..30bf5f8 100644
--- a/crosperf/label.py
+++ b/crosperf/label.py
@@ -32,7 +32,7 @@
cache_only,
log_level,
compiler,
- skylab=False,
+ crosfleet=False,
chrome_src=None):
self.image_type = self._GetImageType(chromeos_image)
@@ -55,7 +55,7 @@
self.log_level = log_level
self.chrome_version = ''
self.compiler = compiler
- self.skylab = skylab
+ self.crosfleet = crosfleet
if not chromeos_root:
if self.image_type == 'local':
@@ -153,7 +153,7 @@
cache_only,
log_level,
compiler,
- skylab=False,
+ crosfleet=False,
chrome_src=None):
self.name = name
self.build = build
@@ -174,7 +174,7 @@
self.checksum = ''
self.log_level = log_level
self.compiler = compiler
- self.skylab = skylab
+ self.crosfleet = crosfleet
self.chrome_version = 'Fake Chrome Version 50'
def _GetImageType(self, chromeos_image):
diff --git a/crosperf/mock_instance.py b/crosperf/mock_instance.py
index 842d634..f44ed87 100644
--- a/crosperf/mock_instance.py
+++ b/crosperf/mock_instance.py
@@ -25,7 +25,7 @@
cache_only=False,
log_level='average',
compiler='gcc',
- skylab=False,
+ crosfleet=False,
chrome_src=None)
label2 = MockLabel(
@@ -42,7 +42,7 @@
cache_only=False,
log_level='average',
compiler='gcc',
- skylab=False,
+ crosfleet=False,
chrome_src=None)
benchmark1 = Benchmark('benchmark1', 'autotest_name_1', 'autotest_args', 2, '',
diff --git a/crosperf/results_cache.py b/crosperf/results_cache.py
index c5c8594..5525858 100644
--- a/crosperf/results_cache.py
+++ b/crosperf/results_cache.py
@@ -27,7 +27,7 @@
import test_flag
SCRATCH_DIR = os.path.expanduser('~/cros_scratch')
-RESULTS_FILE = 'results.txt'
+RESULTS_FILE = 'results.pickle'
MACHINE_FILE = 'machine.txt'
AUTOTEST_TARBALL = 'autotest.tbz2'
RESULTS_TARBALL = 'results.tbz2'
@@ -129,6 +129,7 @@
ret = self.ce.CopyFiles(file_to_copy, dest_file, recursive=False)
if ret:
raise IOError('Could not copy results file: %s' % file_to_copy)
+ file_index += 1
def CopyResultsTo(self, dest_dir):
self.CopyFilesTo(dest_dir, self.results_file)
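
# A minimal sketch of the behaviour the file_index increment restores,
# assuming the destination name is the source basename plus the running
# index (as the updated unittest expectations suggest): each copied file now
# gets a distinct suffix instead of all of them ending in '.0'.
files = ['src_file_1', 'src_file_2', 'src_file_3']
dests = ['/tmp/test/%s.%d' % (f, i) for i, f in enumerate(files)]
# -> ['/tmp/test/src_file_1.0', '/tmp/test/src_file_2.1', '/tmp/test/src_file_3.2']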
@@ -196,9 +197,9 @@
keyvals_dict[key] = result_dict['value']
elif 'values' in result_dict:
values = result_dict['values']
- if ('type' in result_dict and
- result_dict['type'] == 'list_of_scalar_values' and values and
- values != 'null'):
+ if ('type' in result_dict
+ and result_dict['type'] == 'list_of_scalar_values' and values
+ and values != 'null'):
keyvals_dict[key] = sum(values) / float(len(values))
else:
keyvals_dict[key] = values
@@ -244,13 +245,14 @@
results_in_chroot = os.path.join(self.chromeos_root, 'chroot', 'tmp')
if not self.temp_dir:
self.temp_dir = tempfile.mkdtemp(dir=results_in_chroot)
- command = 'cp -r {0}/* {1}'.format(self.results_dir, self.temp_dir)
+ command = f'cp -r {self.results_dir}/* {self.temp_dir}'
self.ce.RunCommand(command, print_to_console=False)
command = ('./generate_test_report --no-color --csv %s' %
(os.path.join('/tmp', os.path.basename(self.temp_dir))))
- _, out, _ = self.ce.ChrootRunCommandWOutput(
- self.chromeos_root, command, print_to_console=False)
+ _, out, _ = self.ce.ChrootRunCommandWOutput(self.chromeos_root,
+ command,
+ print_to_console=False)
keyvals_dict = {}
tmp_dir_in_chroot = misc.GetInsideChrootPath(self.chromeos_root,
self.temp_dir)
@@ -272,7 +274,7 @@
return keyvals_dict
def GetSamples(self):
- samples = 0
+ actual_samples = 0
for perf_data_file in self.perf_data_files:
chroot_perf_data_file = misc.GetInsideChrootPath(self.chromeos_root,
perf_data_file)
@@ -303,17 +305,53 @@
# Each line looks like this:
# 45.42% 237210 chrome
# And we want the second number which is the sample count.
- sample = 0
+ samples = 0
try:
for line in result.split('\n'):
attr = line.split()
if len(attr) == 3 and '%' in attr[0]:
- sample += int(attr[1])
+ samples += int(attr[1])
except:
raise RuntimeError('Cannot parse perf dso result')
- samples += sample
- return [samples, u'samples']
+ actual_samples += samples
+
+ # Remove idle cycles from the accumulated sample count.
+ perf_report_file = f'{perf_data_file}.report'
+ if not os.path.exists(perf_report_file):
+ raise RuntimeError(f'Missing perf report file: {perf_report_file}')
+
+ idle_functions = {
+ '[kernel.kallsyms]':
+ ('intel_idle', 'arch_cpu_idle', 'intel_idle', 'cpu_startup_entry',
+ 'default_idle', 'cpu_idle_loop', 'do_idle'),
+ }
+ idle_samples = 0
+
+ with open(perf_report_file) as f:
+ try:
+ for line in f:
+ line = line.strip()
+ if not line or line[0] == '#':
+ continue
+ # Each line has the following fields,
+ # pylint: disable=line-too-long
+ # Overhead Samples Command Shared Object Symbol
+ # pylint: disable=line-too-long
+ # 1.48% 60 swapper [kernel.kallsyms] [k] intel_idle
+ # pylint: disable=line-too-long
+ # 0.00% 1 shill libshill-net.so [.] std::__1::vector<unsigned char, std::__1::allocator<unsigned char> >::vector<unsigned char const*>
+ _, samples, _, dso, _, function = line.split(None, 5)
+
+ if dso in idle_functions and function in idle_functions[dso]:
+ if self.log_level != 'verbose':
+ self._logger.LogOutput('Removing %s samples from %s in %s' %
+ (samples, function, dso))
+ idle_samples += int(samples)
+ except:
+ raise RuntimeError('Cannot parse perf report')
+ actual_samples -= idle_samples
+ return [actual_samples, u'samples']
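
# A minimal worked example of the idle-cycle adjustment above: if the dso
# report line reads '45.42%  237210  chrome' and the perf report attributes
# 60 samples to intel_idle in [kernel.kallsyms], GetSamples() now returns
# the difference (these numbers mirror the updated unittest below).
total_samples = 237210  # second field of the dso report line
idle_samples = 60       # swapper/intel_idle samples in the perf report
assert total_samples - idle_samples == 237150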
def GetResultsDir(self):
if self.suite == 'tast':
@@ -350,11 +388,11 @@
result = self.FindFilesInResultsDir('-name perf_measurements').splitlines()
if not result:
if self.suite == 'telemetry_Crosperf':
- result = \
- self.FindFilesInResultsDir('-name histograms.json').splitlines()
+ result = (
+ self.FindFilesInResultsDir('-name histograms.json').splitlines())
else:
- result = \
- self.FindFilesInResultsDir('-name results-chart.json').splitlines()
+ result = (self.FindFilesInResultsDir(
+ '-name results-chart.json').splitlines())
return result
def GetTurbostatFile(self):
@@ -412,7 +450,8 @@
if debug_path:
symfs = '--symfs ' + debug_path
- vmlinux = '--vmlinux ' + os.path.join(debug_path, 'boot', 'vmlinux')
+ vmlinux = '--vmlinux ' + os.path.join(debug_path, 'usr', 'lib',
+ 'debug', 'boot', 'vmlinux')
kallsyms = ''
print('** WARNING **: --kallsyms option not applied, no System.map-* '
'for downloaded image.')
@@ -508,9 +547,9 @@
values = value_dict['values']
if not values:
continue
- if ('type' in value_dict and
- value_dict['type'] == 'list_of_scalar_values' and
- values != 'null'):
+ if ('type' in value_dict
+ and value_dict['type'] == 'list_of_scalar_values'
+ and values != 'null'):
result = sum(values) / float(len(values))
else:
result = values
@@ -708,8 +747,9 @@
# order.
heapq.heappush(cmd_top5_cpu_use[cmd_with_pid], round(cpu_use, 1))
- for consumer, usage in sorted(
- cmd_total_cpu_use.items(), key=lambda x: x[1], reverse=True):
+ for consumer, usage in sorted(cmd_total_cpu_use.items(),
+ key=lambda x: x[1],
+ reverse=True):
# Iterate through commands by descending order of total CPU usage.
topcmd = {
'cmd': consumer,
@@ -875,7 +915,8 @@
self.chromeos_root, path_str)
if status:
# Error of reading a perf.data profile is fatal.
- raise PerfDataReadError(f'Failed to read perf.data profile: {path_str}')
+ raise PerfDataReadError(
+ f'Failed to read perf.data profile: {path_str}')
# Pattern to search a line with "perf record" command line:
# # cmdline : /usr/bin/perf record -e instructions -p 123"
@@ -900,7 +941,8 @@
break
else:
# cmdline wasn't found in the header. It's a fatal error.
- raise PerfDataReadError(f'Perf command line is not found in {path_str}')
+ raise PerfDataReadError(
+ f'Perf command line is not found in {path_str}')
return pids
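
# An illustrative sketch only, assuming a simple regex over the header line
# documented above; the real ReadPidFromPerfData() also has to handle
# system-wide ('-a') profiles and missing cmdlines, so this is not the
# actual implementation.
import re

cmdline = '# cmdline : /usr/bin/perf record -e instructions -p 123'
match = re.search(r'-p\s+(\d+)', cmdline)
pid = match.group(1) if match else None  # -> '123'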
def VerifyPerfDataPID(self):
@@ -938,11 +980,11 @@
# Note that this function doesn't know anything about whether there is a
# cache hit or miss. It should process results agnostic of the cache hit
# state.
- if (self.results_file and self.suite == 'telemetry_Crosperf' and
- 'histograms.json' in self.results_file[0]):
+ if (self.results_file and self.suite == 'telemetry_Crosperf'
+ and 'histograms.json' in self.results_file[0]):
self.keyvals = self.ProcessHistogramsResults()
- elif (self.results_file and self.suite != 'telemetry_Crosperf' and
- 'results-chart.json' in self.results_file[0]):
+ elif (self.results_file and self.suite != 'telemetry_Crosperf'
+ and 'results-chart.json' in self.results_file[0]):
self.keyvals = self.ProcessChartResults()
else:
if not use_cache:
@@ -1096,15 +1138,16 @@
f.write(machine_manager.machine_checksum_string[self.label.name])
if os.path.exists(cache_dir):
- command = 'rm -rf {0}'.format(cache_dir)
+ command = f'rm -rf {cache_dir}'
self.ce.RunCommand(command)
- command = 'mkdir -p {0} && '.format(os.path.dirname(cache_dir))
- command += 'chmod g+x {0} && '.format(temp_dir)
- command += 'mv {0} {1}'.format(temp_dir, cache_dir)
+ parent_dir = os.path.dirname(cache_dir)
+ command = f'mkdir -p {parent_dir} && '
+ command += f'chmod g+x {temp_dir} && '
+ command += f'mv {temp_dir} {cache_dir}'
ret = self.ce.RunCommand(command)
if ret:
- command = 'rm -rf {0}'.format(temp_dir)
+ command = f'rm -rf {temp_dir}'
self.ce.RunCommand(command)
raise RuntimeError('Could not move dir %s to dir %s' %
(temp_dir, cache_dir))
@@ -1203,8 +1246,8 @@
self.err = pickle.load(f)
self.retval = pickle.load(f)
- self.chrome_version = \
- super(TelemetryResult, self).GetChromeVersionFromCache(cache_dir)
+ self.chrome_version = (super(TelemetryResult,
+ self).GetChromeVersionFromCache(cache_dir))
self.ProcessResults()
@@ -1266,10 +1309,10 @@
self.run_local = None
self.cwp_dso = None
- def Init(self, chromeos_image, chromeos_root, test_name, iteration, test_args,
- profiler_args, machine_manager, machine, board, cache_conditions,
- logger_to_use, log_level, label, share_cache, suite,
- show_all_results, run_local, cwp_dso):
+ def Init(self, chromeos_image, chromeos_root, test_name, iteration,
+ test_args, profiler_args, machine_manager, machine, board,
+ cache_conditions, logger_to_use, log_level, label, share_cache,
+ suite, show_all_results, run_local, cwp_dso):
self.chromeos_image = chromeos_image
self.chromeos_root = chromeos_root
self.test_name = test_name
@@ -1281,8 +1324,8 @@
self.machine_manager = machine_manager
self.machine = machine
self._logger = logger_to_use
- self.ce = command_executer.GetCommandExecuter(
- self._logger, log_level=log_level)
+ self.ce = command_executer.GetCommandExecuter(self._logger,
+ log_level=log_level)
self.label = label
self.share_cache = share_cache
self.suite = suite
@@ -1368,15 +1411,16 @@
temp_test_args = '%s %s %s' % (self.test_args, self.profiler_args,
self.run_local)
- test_args_checksum = hashlib.md5(temp_test_args.encode('utf-8')).hexdigest()
+ test_args_checksum = hashlib.md5(
+ temp_test_args.encode('utf-8')).hexdigest()
return (image_path_checksum, self.test_name, str(self.iteration),
- test_args_checksum, checksum, machine_checksum, machine_id_checksum,
- str(self.CACHE_VERSION))
+ test_args_checksum, checksum, machine_checksum,
+ machine_id_checksum, str(self.CACHE_VERSION))
def ReadResult(self):
if CacheConditions.FALSE in self.cache_conditions:
cache_dir = self.GetCacheDirForWrite()
- command = 'rm -rf %s' % (cache_dir,)
+ command = 'rm -rf %s' % (cache_dir, )
self.ce.RunCommand(command)
return None
cache_dir = self.GetCacheDirForRead()
@@ -1389,14 +1433,15 @@
if self.log_level == 'verbose':
self._logger.LogOutput('Trying to read from cache dir: %s' % cache_dir)
- result = Result.CreateFromCacheHit(self._logger, self.log_level, self.label,
- self.machine, cache_dir, self.test_name,
- self.suite, self.cwp_dso)
+ result = Result.CreateFromCacheHit(self._logger, self.log_level,
+ self.label, self.machine, cache_dir,
+ self.test_name, self.suite,
+ self.cwp_dso)
if not result:
return None
- if (result.retval == 0 or
- CacheConditions.RUN_SUCCEEDED not in self.cache_conditions):
+ if (result.retval == 0
+ or CacheConditions.RUN_SUCCEEDED not in self.cache_conditions):
return result
return None
diff --git a/crosperf/results_cache_unittest.py b/crosperf/results_cache_unittest.py
index 91ceed2..d6953ee 100755
--- a/crosperf/results_cache_unittest.py
+++ b/crosperf/results_cache_unittest.py
@@ -9,7 +9,9 @@
from __future__ import print_function
+import io
import os
+import pickle
import shutil
import tempfile
import unittest
@@ -30,6 +32,8 @@
from cros_utils import logger
from cros_utils import misc
+# The following hardcoded string has blocked words replaced, and thus
+# is not representative of a true crosperf output.
# pylint: disable=line-too-long
OUTPUT = """CMD (True): ./test_that.sh\
--remote=172.17.128.241 --board=lumpy LibCBench
@@ -41,13 +45,13 @@
INFO : * 'client/site_tests/platform_LibCBench/control'
INFO : Running client test client/site_tests/platform_LibCBench/control
-./server/autoserv -m 172.17.128.241 --ssh-port 22 -c client/site_tests/platform_LibCBench/control -r /tmp/test_that.PO1234567/platform_LibCBench --test-retry=0 --args
+./server/autoserv -m 172.17.128.241 --ssh-port 22 -c client/site_tests/platform_LibCBench/control -r /tmp/test_that.PO1234567/platform_LibCBench --test-retry=0 --args
ERROR:root:import statsd failed, no stats will be reported.
14:20:22 INFO | Results placed in /tmp/test_that.PO1234567/platform_LibCBench
14:20:22 INFO | Processing control file
-14:20:23 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_VIIP67ssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
+14:20:23 INFO | Starting main ssh connection '/usr/bin/ssh -a -x -N -o ControlMain=yes -o ControlPath=/tmp/_autotmp_VIIP67ssh-main/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
14:20:23 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts.
-14:20:23 INFO | INFO ---- ---- kernel=3.8.11 localtime=May 22 14:20:23 timestamp=1369257623
+14:20:23 INFO | INFO\t----\t----\tkernel=3.8.11\tlocaltime=May 22 14:20:23\ttimestamp=1369257623
14:20:23 INFO | Installing autotest on 172.17.128.241
14:20:23 INFO | Using installation dir /usr/local/autotest
14:20:23 WARNI| No job_repo_url for <remote host: 172.17.128.241>
@@ -58,11 +62,11 @@
14:20:24 INFO | Entered autotestd_monitor.
14:20:24 INFO | Finished launching tail subprocesses.
14:20:24 INFO | Finished waiting on autotestd to start.
-14:20:26 INFO | START ---- ---- timestamp=1369257625 localtime=May 22 14:20:25
-14:20:26 INFO | START platform_LibCBench platform_LibCBench timestamp=1369257625 localtime=May 22 14:20:25
-14:20:30 INFO | GOOD platform_LibCBench platform_LibCBench timestamp=1369257630 localtime=May 22 14:20:30 completed successfully
-14:20:30 INFO | END GOOD platform_LibCBench platform_LibCBench timestamp=1369257630 localtime=May 22 14:20:30
-14:20:31 INFO | END GOOD ---- ---- timestamp=1369257630 localtime=May 22 14:20:30
+14:20:26 INFO | START\t----\t----\ttimestamp=1369257625\tlocaltime=May 22 14:20:25
+14:20:26 INFO | \tSTART\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257625\tlocaltime=May 22 14:20:25
+14:20:30 INFO | \t\tGOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30\tcompleted successfully
+14:20:30 INFO | \tEND GOOD\tplatform_LibCBench\tplatform_LibCBench\ttimestamp=1369257630\tlocaltime=May 22 14:20:30
+14:20:31 INFO | END GOOD\t----\t----\ttimestamp=1369257630\tlocaltime=May 22 14:20:30
14:20:31 INFO | Got lock of exit_code_file.
14:20:31 INFO | Released lock of exit_code_file and closed it.
OUTPUT: ==============================
@@ -71,14 +75,14 @@
OUTPUT: Thread Status:
RUNNING: 1 ('ttt: LibCBench (1)' 0:01:21)
Machine Status:
-Machine Thread Lock Status Checksum
+Machine Thread Lock Status Checksum
172.17.128.241 ttt: LibCBench (1) True RUNNING 3ba9f2ecbb222f20887daea5583d86ba
OUTPUT: ==============================
14:20:33 INFO | Killing child processes.
14:20:33 INFO | Client complete
14:20:33 INFO | Finished processing control file
-14:20:33 INFO | Starting master ssh connection '/usr/bin/ssh -a -x -N -o ControlMaster=yes -o ControlPath=/tmp/_autotmp_aVJUgmssh-master/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
+14:20:33 INFO | Starting main ssh connection '/usr/bin/ssh -a -x -N -o ControlMain=yes -o ControlPath=/tmp/_autotmp_aVJUgmssh-main/socket -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes -o ConnectTimeout=30 -o ServerAliveInterval=180 -o ServerAliveCountMax=3 -o ConnectionAttempts=4 -o Protocol=2 -l root -p 22 172.17.128.241'
14:20:33 ERROR| [stderr] Warning: Permanently added '172.17.128.241' (RSA) to the list of known hosts.
INFO : Test results:
@@ -115,7 +119,7 @@
-------------------------------------------------------------------
Total PASS: 2/2 (100%)
-INFO : Elapsed time: 0m16s
+INFO : Elapsed time: 0m16s
"""
error = """
@@ -176,7 +180,7 @@
# total memory : 5911496 kB
# cmdline : /usr/bin/perf record -e instructions -p {pid}
# event : name = instructions, , id = ( 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193 ), type = 8, size = 112
-# event : name = dummy:u, , id = ( 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204 ), type = 1, size = 112, config = 0x9
+# event : name = placeholder:u, , id = ( 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204 ), type = 1, size = 112, config = 0x9
# CPU_TOPOLOGY info available, use -I to display
# pmu mappings: software = 1, uprobe = 6, cs_etm = 8, breakpoint = 5, tracepoint = 2, armv8_pmuv3 = 7
# contains AUX area data (e.g. instruction trace)
@@ -188,8 +192,8 @@
#
"""
-TURBOSTAT_LOG_OUTPUT = \
-"""CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
+TURBOSTAT_LOG_OUTPUT = (
+ """CPU Avg_MHz Busy% Bzy_MHz TSC_MHz IRQ CoreTmp
- 329 12.13 2723 2393 10975 77
0 336 12.41 2715 2393 6328 77
2 323 11.86 2731 2393 4647 69
@@ -217,7 +221,7 @@
- 843 29.83 2832 2393 28161 47
0 827 29.35 2826 2393 16093 47
2 858 30.31 2838 2393 12068 46
-"""
+""")
TURBOSTAT_DATA = {
'cpufreq': {
'all': [2723, 2884, 2927, 2937, 2932, 2933, 2832]
@@ -227,8 +231,7 @@
},
}
-TOP_LOG = \
-"""
+TOP_LOG = ("""
PID USER PR NI VIRT RES SHR S %CPU %MEM TIME+ COMMAND
4102 chronos 12 -8 3454472 238300 118188 R 41.8 6.1 0:08.37 chrome
4204 chronos 12 -8 2492716 205728 179016 S 11.8 5.3 0:03.89 chrome
@@ -250,7 +253,7 @@
5713 chronos 20 0 5178652 103120 50372 S 17.8 2.6 0:01.13 chrome
7 root 20 0 0 0 0 S 1.0 0.0 0:00.73 rcu_preempt
855 root 20 0 0 0 0 S 1.0 0.0 0:00.01 kworker/4:2
-"""
+""")
TOP_DATA = [
{
'cmd': 'chrome-5745',
@@ -301,8 +304,7 @@
'top5_cpu_use': [1.0],
},
]
-TOP_OUTPUT = \
-""" COMMAND AVG CPU% SEEN HIGHEST 5
+TOP_OUTPUT = (""" COMMAND AVG CPU% SEEN HIGHEST 5
chrome 128.250000 6 [122.8, 107.9, 17.8, 5.0, 2.0]
irq/230-cros-ec 1.000000 1 [2.0]
sshd 0.500000 1 [1.0]
@@ -310,10 +312,9 @@
spi5 0.500000 1 [1.0]
rcu_preempt 0.500000 1 [1.0]
kworker/4:2 0.500000 1 [1.0]
-"""
+""")
-CPUSTATS_UNIQ_OUTPUT = \
-"""
+CPUSTATS_UNIQ_OUTPUT = ("""
/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1512000
/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1512000
/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 2016000
@@ -326,7 +327,7 @@
soc-thermal 45456
little-cpu 42555
big-cpu 61724
-"""
+""")
CPUSTATS_UNIQ_DATA = {
'cpufreq': {
'cpu0': [1512, 1500],
@@ -339,8 +340,7 @@
'big-cpu': [51.2, 61.7]
}
}
-CPUSTATS_DUPL_OUTPUT = \
-"""
+CPUSTATS_DUPL_OUTPUT = ("""
/sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_cur_freq 1512000
/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1512000
/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1512000
@@ -353,7 +353,7 @@
/sys/devices/system/cpu/cpu1/cpufreq/cpuinfo_cur_freq 1614000
/sys/devices/system/cpu/cpu2/cpufreq/cpuinfo_cur_freq 1614000
/sys/devices/system/cpu/cpu3/cpufreq/cpuinfo_cur_freq 1982000
-"""
+""")
CPUSTATS_DUPL_DATA = {
'cpufreq': {
'cpu0': [1512, 1500, 1614],
@@ -363,8 +363,7 @@
TMP_DIR1 = '/tmp/tmpAbcXyz'
-HISTOGRAMSET = \
-"""
+HISTOGRAMSET = ("""
[
{
"values": [
@@ -436,14 +435,13 @@
}
]
-"""
+""")
# pylint: enable=line-too-long
class MockResult(Result):
"""Mock result class."""
-
def __init__(self, mylogger, label, logging_level, machine):
super(MockResult, self).__init__(mylogger, label, logging_level, machine)
@@ -459,7 +457,6 @@
class ResultTest(unittest.TestCase):
"""Result test class."""
-
def __init__(self, *args, **kwargs):
super(ResultTest, self).__init__(*args, **kwargs)
self.callFakeProcessResults = False
@@ -488,8 +485,8 @@
def testCreateFromRun(self):
result = MockResult.CreateFromRun(logger.GetLogger(), 'average',
- self.mock_label, 'remote1', OUTPUT, error,
- 0, True)
+ self.mock_label, 'remote1', OUTPUT,
+ error, 0, True)
self.assertEqual(result.keyvals, keyvals)
self.assertEqual(result.chroot_results_dir,
'/tmp/test_that.PO1234567/platform_LibCBench')
@@ -523,8 +520,8 @@
second_args = mock_copyfiles.call_args_list[1][0]
third_args = mock_copyfiles.call_args_list[2][0]
self.assertEqual(first_args, ('src_file_1', '/tmp/test/src_file_1.0'))
- self.assertEqual(second_args, ('src_file_2', '/tmp/test/src_file_2.0'))
- self.assertEqual(third_args, ('src_file_3', '/tmp/test/src_file_3.0'))
+ self.assertEqual(second_args, ('src_file_2', '/tmp/test/src_file_2.1'))
+ self.assertEqual(third_args, ('src_file_3', '/tmp/test/src_file_3.2'))
mock_runcmd.reset_mock()
mock_copyfiles.reset_mock()
@@ -537,7 +534,8 @@
mock_runcmd.call_args_list[1])
self.assertEqual(mock_runcmd.call_args_list[0],
mock_runcmd.call_args_list[2])
- self.assertEqual(mock_runcmd.call_args_list[0][0], ('mkdir -p /tmp/test',))
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('mkdir -p /tmp/test', ))
# test 3. CopyFiles returns 1 (fails).
mock_copyfiles.return_value = 1
@@ -719,7 +717,8 @@
mock_mkdtemp.return_value = TMP_DIR1
mock_chrootruncmd.return_value = [
- '', ('%s,PASS\n%s/telemetry_Crosperf,PASS\n') % (TMP_DIR1, TMP_DIR1), ''
+ '', ('%s,PASS\n%s/telemetry_Crosperf,PASS\n') % (TMP_DIR1, TMP_DIR1),
+ ''
]
mock_getpath.return_value = TMP_DIR1
self.result.ce.ChrootRunCommandWOutput = mock_chrootruncmd
@@ -734,7 +733,7 @@
self.assertEqual(self.kv_dict, {'': 'PASS', 'telemetry_Crosperf': 'PASS'})
self.assertEqual(mock_runcmd.call_count, 1)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('cp -r /tmp/test_that_resultsNmq/* %s' % TMP_DIR1,))
+ ('cp -r /tmp/test_that_resultsNmq/* %s' % TMP_DIR1, ))
self.assertEqual(mock_chrootruncmd.call_count, 1)
self.assertEqual(
mock_chrootruncmd.call_args_list[0][0],
@@ -773,15 +772,26 @@
@mock.patch.object(misc, 'GetInsideChrootPath')
@mock.patch.object(command_executer.CommandExecuter,
'ChrootRunCommandWOutput')
- def test_get_samples(self, mock_chrootruncmd, mock_getpath):
- fake_file = '/usr/chromeos/chroot/tmp/results/fake_file'
+ @mock.patch.object(os.path, 'exists')
+ def test_get_samples(self, mock_exists, mock_get_total_samples,
+ mock_getpath):
self.result.perf_data_files = ['/tmp/results/perf.data']
self.result.board = 'samus'
- mock_getpath.return_value = fake_file
- self.result.ce.ChrootRunCommandWOutput = mock_chrootruncmd
- mock_chrootruncmd.return_value = ['', '45.42% 237210 chrome ', '']
- samples = self.result.GetSamples()
- self.assertEqual(samples, [237210, u'samples'])
+ mock_getpath.return_value = '/usr/chromeos/chroot/tmp/results/perf.data'
+ mock_get_total_samples.return_value = [
+ '', '45.42% 237210 chrome ', ''
+ ]
+ mock_exists.return_value = True
+
+ # mock_open does not seem to support iteration.
+ # pylint: disable=line-too-long
+ content = """1.63% 66 dav1d-tile chrome [.] decode_coefs
+ 1.48% 60 swapper [kernel.kallsyms] [k] intel_idle
+ 1.16% 47 dav1d-tile chrome [.] decode_sb"""
+
+ with mock.patch('builtins.open', return_value=io.StringIO(content)):
+ samples = self.result.GetSamples()
+ self.assertEqual(samples, [237210 - 60, u'samples'])
def test_get_results_dir(self):
@@ -805,7 +815,7 @@
res = self.result.FindFilesInResultsDir('-name perf.data')
self.assertEqual(mock_runcmd.call_count, 1)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('find /tmp/test_results -name perf.data',))
+ ('find /tmp/test_results -name perf.data', ))
self.assertEqual(res, '/tmp/test_results/perf.data')
mock_runcmd.reset_mock()
@@ -821,7 +831,8 @@
self.result.FindFilesInResultsDir = mock_findfiles
res = self.result.GetPerfDataFiles()
self.assertEqual(res, ['line1', 'line1'])
- self.assertEqual(mock_findfiles.call_args_list[0][0], ('-name perf.data',))
+ self.assertEqual(mock_findfiles.call_args_list[0][0],
+ ('-name perf.data', ))
def test_get_perf_report_files(self):
self.args = None
@@ -952,16 +963,18 @@
"""Verify perf PID which is present in TOP_DATA."""
self.result.top_cmds = TOP_DATA
# pid is present in TOP_DATA.
- with mock.patch.object(
- Result, 'ReadPidFromPerfData', return_value=['5713']):
+ with mock.patch.object(Result,
+ 'ReadPidFromPerfData',
+ return_value=['5713']):
self.result.VerifyPerfDataPID()
def test_verify_perf_data_pid_fail(self):
"""Test perf PID missing in top raises the error."""
self.result.top_cmds = TOP_DATA
# pid is not in the list of top processes.
- with mock.patch.object(
- Result, 'ReadPidFromPerfData', return_value=['9999']):
+ with mock.patch.object(Result,
+ 'ReadPidFromPerfData',
+ return_value=['9999']):
with self.assertRaises(PidVerificationError):
self.result.VerifyPerfDataPID()
@@ -970,7 +983,9 @@
def test_read_pid_from_perf_data_ok(self, mock_runcmd):
"""Test perf header parser, normal flow."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
exp_pid = '12345'
mock_runcmd.return_value = (0, PERF_DATA_HEADER.format(pid=exp_pid), '')
pids = self.result.ReadPidFromPerfData()
@@ -1001,7 +1016,9 @@
def test_read_pid_from_perf_data_no_pid(self, mock_runcmd):
"""Test perf.data without PID."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
cmd_line = '# cmdline : /usr/bin/perf record -e instructions'
mock_runcmd.return_value = (0, cmd_line, '')
pids = self.result.ReadPidFromPerfData()
@@ -1013,7 +1030,9 @@
def test_read_pid_from_perf_data_system_wide(self, mock_runcmd):
"""Test reading from system-wide profile with PID."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
# There is '-p <pid>' in command line but it's still system-wide: '-a'.
cmd_line = '# cmdline : /usr/bin/perf record -e instructions -a -p 1234'
mock_runcmd.return_value = (0, cmd_line, '')
@@ -1026,7 +1045,9 @@
def test_read_pid_from_perf_data_read_fail(self, mock_runcmd):
"""Failure to read perf.data raises the error."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
# Error status of the profile read.
mock_runcmd.return_value = (1, '', '')
with self.assertRaises(PerfDataReadError):
@@ -1037,7 +1058,9 @@
def test_read_pid_from_perf_data_fail(self, mock_runcmd):
"""Failure to find cmdline in perf.data header raises the error."""
self.result.ce.ChrootRunCommandWOutput = mock_runcmd
- self.result.perf_data_files = ['/tmp/chromeos/chroot/tmp/results/perf.data']
+ self.result.perf_data_files = [
+ '/tmp/chromeos/chroot/tmp/results/perf.data'
+ ]
# Empty output.
mock_runcmd.return_value = (0, '', '')
with self.assertRaises(PerfDataReadError):
@@ -1262,12 +1285,11 @@
self.assertEqual(mock_chrootruncmd.call_args_list[0][0],
(self.result.chromeos_root,
('/usr/sbin/perf report -n --symfs /tmp/debug '
- '--vmlinux /tmp/debug/boot/vmlinux '
+ '--vmlinux /tmp/debug/usr/lib/debug/boot/vmlinux '
'-i %s --stdio > %s') % (fake_file, fake_file)))
@mock.patch.object(misc, 'GetOutsideChrootPath')
def test_populate_from_run(self, mock_getpath):
-
def FakeGetResultsDir():
self.callGetResultsDir = True
return '/tmp/results_dir'
@@ -1355,7 +1377,6 @@
return {'Total': 10}
def test_process_results(self):
-
def FakeGatherPerfResults():
self.callGatherPerfResults = True
@@ -1401,16 +1422,17 @@
self.result.ProcessResults()
shutil.rmtree(os.path.dirname(self.result.results_file[0]))
# Verify the summary for the story is correct
- self.assertEqual(self.result.keyvals['timeToFirstContentfulPaint__typical'],
- [880.000, u'ms_smallerIsBetter'])
+ self.assertEqual(
+ self.result.keyvals['timeToFirstContentfulPaint__typical'],
+ [880.000, u'ms_smallerIsBetter'])
    # Verify the summary for a certain story tag is correct
self.assertEqual(
- self.result
- .keyvals['timeToFirstContentfulPaint__cache_temperature:cold'],
+ self.result.
+ keyvals['timeToFirstContentfulPaint__cache_temperature:cold'],
[1000.000, u'ms_smallerIsBetter'])
self.assertEqual(
- self.result
- .keyvals['timeToFirstContentfulPaint__cache_temperature:warm'],
+ self.result.
+ keyvals['timeToFirstContentfulPaint__cache_temperature:warm'],
[800.000, u'ms_smallerIsBetter'])
@mock.patch.object(Result, 'ProcessCpustatsResults')
@@ -1566,7 +1588,8 @@
u'telemetry_page_measurement_results__num_errored': [0, u'count'],
u'string-fasta__string-fasta': [23.2, u'ms'],
u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
- u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': [3.2, u'ms'],
+ u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte':
+ [3.2, u'ms'],
u'access-nsieve__access-nsieve': [7.9, u'ms'],
u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
u'string-validate-input__string-validate-input': [19.3, u'ms'],
@@ -1604,7 +1627,8 @@
u'telemetry_page_measurement_results__num_errored': [0, u'count'],
u'string-fasta__string-fasta': [23.2, u'ms'],
u'crypto-sha1__crypto-sha1': [11.6, u'ms'],
- u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte': [3.2, u'ms'],
+ u'bitops-3bit-bits-in-byte__bitops-3bit-bits-in-byte':
+ [3.2, u'ms'],
u'access-nsieve__access-nsieve': [7.9, u'ms'],
u'bitops-nsieve-bits__bitops-nsieve-bits': [9.4, u'ms'],
u'string-validate-input__string-validate-input': [19.3, u'ms'],
@@ -1651,8 +1675,9 @@
self.assertEqual(mock_getroot.call_count, 1)
self.assertEqual(mock_runcmd.call_count, 2)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('rm -rf test_results_dir',))
- self.assertEqual(mock_runcmd.call_args_list[1][0], ('rm -rf testtemp_dir',))
+ ('rm -rf test_results_dir', ))
+ self.assertEqual(mock_runcmd.call_args_list[1][0],
+ ('rm -rf testtemp_dir', ))
    # Test 2. Same, except that the results_dir name does not contain
# 'test_that_results_'
@@ -1666,8 +1691,9 @@
self.assertEqual(mock_getroot.call_count, 1)
self.assertEqual(mock_runcmd.call_count, 2)
self.assertEqual(mock_runcmd.call_args_list[0][0],
- ('rm -rf /tmp/tmp_AbcXyz',))
- self.assertEqual(mock_runcmd.call_args_list[1][0], ('rm -rf testtemp_dir',))
+ ('rm -rf /tmp/tmp_AbcXyz', ))
+ self.assertEqual(mock_runcmd.call_args_list[1][0],
+ ('rm -rf testtemp_dir', ))
# Test 3. mock_getroot returns nothing; 'rm_chroot_tmp' is False.
mock_getroot.reset_mock()
@@ -1675,7 +1701,8 @@
self.result.CleanUp(False)
self.assertEqual(mock_getroot.call_count, 0)
self.assertEqual(mock_runcmd.call_count, 1)
- self.assertEqual(mock_runcmd.call_args_list[0][0], ('rm -rf testtemp_dir',))
+ self.assertEqual(mock_runcmd.call_args_list[0][0],
+ ('rm -rf testtemp_dir', ))
# Test 4. 'rm_chroot_tmp' is True, but result_dir & temp_dir are None.
mock_getroot.reset_mock()
@@ -1689,7 +1716,6 @@
@mock.patch.object(misc, 'GetInsideChrootPath')
@mock.patch.object(command_executer.CommandExecuter, 'ChrootRunCommand')
def test_store_to_cache_dir(self, mock_chrootruncmd, mock_getpath):
-
def FakeMkdtemp(directory=''):
if directory:
pass
@@ -1724,7 +1750,7 @@
base_dir = os.path.join(os.getcwd(), 'test_cache/compare_output')
self.assertTrue(os.path.exists(os.path.join(test_dir, 'autotest.tbz2')))
self.assertTrue(os.path.exists(os.path.join(test_dir, 'machine.txt')))
- self.assertTrue(os.path.exists(os.path.join(test_dir, 'results.txt')))
+ self.assertTrue(os.path.exists(os.path.join(test_dir, 'results.pickle')))
f1 = os.path.join(test_dir, 'machine.txt')
f2 = os.path.join(base_dir, 'machine.txt')
@@ -1732,11 +1758,13 @@
[_, out, _] = self.result.ce.RunCommandWOutput(cmd)
self.assertEqual(len(out), 0)
- f1 = os.path.join(test_dir, 'results.txt')
- f2 = os.path.join(base_dir, 'results.txt')
- cmd = 'diff %s %s' % (f1, f2)
- [_, out, _] = self.result.ce.RunCommandWOutput(cmd)
- self.assertEqual(len(out), 0)
+ f1 = os.path.join(test_dir, 'results.pickle')
+ f2 = os.path.join(base_dir, 'results.pickle')
+ with open(f1, 'rb') as f:
+ f1_obj = pickle.load(f)
+ with open(f2, 'rb') as f:
+ f2_obj = pickle.load(f)
+ self.assertEqual(f1_obj, f2_obj)
# Clean up after test.
tempfile.mkdtemp = save_real_mkdtemp
@@ -1747,87 +1775,87 @@
TELEMETRY_RESULT_KEYVALS = {
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'math-cordic (ms)':
- '11.4',
+ '11.4',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-nbody (ms)':
- '6.9',
+ '6.9',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-fannkuch (ms)':
- '26.3',
+ '26.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'math-spectral-norm (ms)':
- '6.3',
+ '6.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-nsieve-bits (ms)':
- '9.3',
+ '9.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'math-partial-sums (ms)':
- '32.8',
+ '32.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'regexp-dna (ms)':
- '16.1',
+ '16.1',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'3d-cube (ms)':
- '42.7',
+ '42.7',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'crypto-md5 (ms)':
- '10.8',
+ '10.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'crypto-sha1 (ms)':
- '12.4',
+ '12.4',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-tagcloud (ms)':
- '47.2',
+ '47.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-fasta (ms)':
- '36.3',
+ '36.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-binary-trees (ms)':
- '7.3',
+ '7.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'date-format-xparb (ms)':
- '138.1',
+ '138.1',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'crypto-aes (ms)':
- '19.2',
+ '19.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'Total (ms)':
- '656.5',
+ '656.5',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-base64 (ms)':
- '17.5',
+ '17.5',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-validate-input (ms)':
- '24.8',
+ '24.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'3d-raytrace (ms)':
- '28.7',
+ '28.7',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'controlflow-recursive (ms)':
- '5.3',
+ '5.3',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-bits-in-byte (ms)':
- '9.8',
+ '9.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'3d-morph (ms)':
- '50.2',
+ '50.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-bitwise-and (ms)':
- '8.8',
+ '8.8',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'access-nsieve (ms)':
- '8.6',
+ '8.6',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'date-format-tofte (ms)':
- '31.2',
+ '31.2',
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'bitops-3bit-bits-in-byte (ms)':
- '3.5',
+ '3.5',
'retval':
- 0,
+ 0,
'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html '
'string-unpack-code (ms)':
- '45.0'
+ '45.0'
}
PURE_TELEMETRY_OUTPUT = """
@@ -1837,7 +1865,6 @@
class TelemetryResultTest(unittest.TestCase):
"""Telemetry result test."""
-
def __init__(self, *args, **kwargs):
super(TelemetryResultTest, self).__init__(*args, **kwargs)
self.callFakeProcessResults = False
@@ -1848,12 +1875,10 @@
'autotest_dir', 'debug_dir', '/tmp', 'lumpy',
'remote', 'image_args', 'cache_dir', 'average',
'gcc', False, None)
- self.mock_machine = machine_manager.MockCrosMachine('falco.cros',
- '/tmp/chromeos',
- 'average')
+ self.mock_machine = machine_manager.MockCrosMachine(
+ 'falco.cros', '/tmp/chromeos', 'average')
def test_populate_from_run(self):
-
def FakeProcessResults():
self.callFakeProcessResults = True
@@ -1884,7 +1909,6 @@
class ResultsCacheTest(unittest.TestCase):
"""Resultcache test class."""
-
def __init__(self, *args, **kwargs):
super(ResultsCacheTest, self).__init__(*args, **kwargs)
self.fakeCacheReturnResult = None
@@ -1926,7 +1950,6 @@
@mock.patch.object(image_checksummer.ImageChecksummer, 'Checksum')
def test_get_cache_dir_for_write(self, mock_checksum):
-
def FakeGetMachines(label):
if label:
pass
@@ -1940,8 +1963,8 @@
mock_checksum.return_value = 'FakeImageChecksumabc123'
self.results_cache.machine_manager.GetMachines = FakeGetMachines
- self.results_cache.machine_manager.machine_checksum['mock_label'] = \
- 'FakeMachineChecksumabc987'
+ self.results_cache.machine_manager.machine_checksum['mock_label'] = (
+ 'FakeMachineChecksumabc987')
# Based on the label, benchmark and machines, get the directory in which
# to store the cache information for this test run.
result_path = self.results_cache.GetCacheDirForWrite()
@@ -1987,8 +2010,8 @@
mock_checksum.return_value = 'FakeImageChecksumabc123'
self.results_cache.machine_manager.GetMachines = FakeGetMachines
- self.results_cache.machine_manager.machine_checksum['mock_label'] = \
- 'FakeMachineChecksumabc987'
+ self.results_cache.machine_manager.machine_checksum['mock_label'] = (
+ 'FakeMachineChecksumabc987')
# Test 1. Generating cache name for reading (not writing).
key_list = self.results_cache.GetCacheKeyList(True)
@@ -2035,7 +2058,8 @@
# Test 5. Generating cache name for writing, with local image type, and
# specifying that the image path must match the cached image path.
self.results_cache.label.image_type = 'local'
- self.results_cache.cache_conditions.append(CacheConditions.IMAGE_PATH_MATCH)
+ self.results_cache.cache_conditions.append(
+ CacheConditions.IMAGE_PATH_MATCH)
key_list = self.results_cache.GetCacheKeyList(False)
self.assertEqual(key_list[0], '54524606abaae4fdf7b02f49f7ae7127')
self.assertEqual(key_list[3], 'fda29412ceccb72977516c4785d08e2c')
diff --git a/crosperf/schedv2.py b/crosperf/schedv2.py
index 68e1e5b..49c6344 100644
--- a/crosperf/schedv2.py
+++ b/crosperf/schedv2.py
@@ -108,8 +108,8 @@
if self._terminated:
return 1
- if self._sched.get_experiment().skylab:
- self._logger.LogOutput('Skylab mode, do not image before testing.')
+ if self._sched.get_experiment().crosfleet:
+ self._logger.LogOutput('Crosfleet mode, do not image before testing.')
self._dut.label = label
return 0
@@ -295,9 +295,9 @@
# Split benchmarkruns set into segments. Each segment will be handled by
# a thread. Note, we use (x+3)/4 to mimic math.ceil(x/4).
n_threads = max(2, min(20, (n_benchmarkruns + 3) // 4))
- self._logger.LogOutput(('Starting {} threads to read cache status for '
- '{} benchmark runs ...').format(
- n_threads, n_benchmarkruns))
+ self._logger.LogOutput(
+ ('Starting {} threads to read cache status for '
+ '{} benchmark runs ...').format(n_threads, n_benchmarkruns))
benchmarkruns_per_thread = (n_benchmarkruns + n_threads - 1) // n_threads
benchmarkrun_segments = []
for i in range(n_threads - 1):
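
# A worked example of the thread-count formula above: (x + 3) // 4 is integer
# ceiling division by 4, clamped to the [2, 20] range.
def n_threads_for(n_benchmarkruns):
  return max(2, min(20, (n_benchmarkruns + 3) // 4))

assert n_threads_for(5) == 2     # ceil(5/4) = 2, already at the floor of 2
assert n_threads_for(30) == 8    # ceil(30/4) = 8
assert n_threads_for(100) == 20  # capped at 20 threads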
diff --git a/crosperf/settings_factory.py b/crosperf/settings_factory.py
index 7033a3e..78834c6 100644
--- a/crosperf/settings_factory.py
+++ b/crosperf/settings_factory.py
@@ -22,14 +22,13 @@
def __init__(self, name):
super(BenchmarkSettings, self).__init__(name, 'benchmark')
self.AddField(
- TextField(
- 'test_name',
- description='The name of the test to run. '
- 'Defaults to the name of the benchmark.'))
+ TextField('test_name',
+ description='The name of the test to run. '
+ 'Defaults to the name of the benchmark.'))
self.AddField(
- TextField(
- 'test_args', description='Arguments to be passed to the '
- 'test.'))
+ TextField('test_args',
+ description='Arguments to be passed to the '
+ 'test.'))
self.AddField(
IntegerField(
'iterations',
@@ -39,24 +38,21 @@
'If not set, will run each benchmark test the optimum number of '
'times to get a stable result.'))
self.AddField(
- TextField(
- 'suite',
- default='test_that',
- description='The type of the benchmark.'))
+ TextField('suite',
+ default='test_that',
+ description='The type of the benchmark.'))
self.AddField(
- IntegerField(
- 'retries',
- default=0,
- description='Number of times to retry a '
- 'benchmark run.'))
+ IntegerField('retries',
+ default=0,
+ description='Number of times to retry a '
+ 'benchmark run.'))
self.AddField(
- BooleanField(
- 'run_local',
- description='Run benchmark harness on the DUT. '
- 'Currently only compatible with the suite: '
- 'telemetry_Crosperf.',
- required=False,
- default=True))
+ BooleanField('run_local',
+ description='Run benchmark harness on the DUT. '
+ 'Currently only compatible with the suite: '
+ 'telemetry_Crosperf.',
+ required=False,
+ default=True))
self.AddField(
FloatField(
'weight',
@@ -70,12 +66,11 @@
def __init__(self, name):
super(LabelSettings, self).__init__(name, 'label')
self.AddField(
- TextField(
- 'chromeos_image',
- required=False,
- description='The path to the image to run tests '
- 'on, for local/custom-built images. See the '
- "'build' option for official or trybot images."))
+ TextField('chromeos_image',
+ required=False,
+ description='The path to the image to run tests '
+ 'on, for local/custom-built images. See the '
+ "'build' option for official or trybot images."))
self.AddField(
TextField(
'autotest_path',
@@ -90,53 +85,46 @@
description='Debug info directory relative to chroot which has '
'symbols and vmlinux that can be used by perf tool.'))
self.AddField(
- TextField(
- 'chromeos_root',
- description='The path to a chromeos checkout which '
- 'contains a src/scripts directory. Defaults to '
- 'the chromeos checkout which contains the '
- 'chromeos_image.'))
+ TextField('chromeos_root',
+ description='The path to a chromeos checkout which '
+ 'contains a src/scripts directory. Defaults to '
+ 'the chromeos checkout which contains the '
+ 'chromeos_image.'))
self.AddField(
- ListField(
- 'remote',
- description='A comma-separated list of IPs of chromeos'
- 'devices to run experiments on.'))
+ ListField('remote',
+                  description='A comma-separated list of IPs of chromeos '
+ 'devices to run experiments on.'))
self.AddField(
- TextField(
- 'image_args',
- required=False,
- default='',
- description='Extra arguments to pass to '
- 'image_chromeos.py.'))
+ TextField('image_args',
+ required=False,
+ default='',
+ description='Extra arguments to pass to '
+ 'image_chromeos.py.'))
self.AddField(
- TextField(
- 'cache_dir',
- default='',
- description='The cache dir for this image.'))
+ TextField('cache_dir',
+ default='',
+ description='The cache dir for this image.'))
self.AddField(
- TextField(
- 'compiler',
- default='gcc',
- description='The compiler used to build the '
- 'ChromeOS image (gcc or llvm).'))
+ TextField('compiler',
+ default='gcc',
+ description='The compiler used to build the '
+ 'ChromeOS image (gcc or llvm).'))
self.AddField(
- TextField(
- 'chrome_src',
- description='The path to the source of chrome. '
- 'This is used to run telemetry benchmarks. '
- 'The default one is the src inside chroot.',
- required=False,
- default=''))
+ TextField('chrome_src',
+ description='The path to the source of chrome. '
+ 'This is used to run telemetry benchmarks. '
+ 'The default one is the src inside chroot.',
+ required=False,
+ default=''))
self.AddField(
- TextField(
- 'build',
- description='The xbuddy specification for an '
- 'official or trybot image to use for tests. '
- "'/remote' is assumed, and the board is given "
- "elsewhere, so omit the '/remote/<board>/' xbuddy "
- 'prefix.',
- required=False,
- default=''))
+ TextField('build',
+ description='The xbuddy specification for an '
+ 'official or trybot image to use for tests. '
+ "'/remote' is assumed, and the board is given "
+ "elsewhere, so omit the '/remote/<board>/' xbuddy "
+ 'prefix.',
+ required=False,
+ default=''))
class GlobalSettings(Settings):
@@ -145,67 +133,56 @@
def __init__(self, name):
super(GlobalSettings, self).__init__(name, 'global')
self.AddField(
- TextField(
- 'name',
- description='The name of the experiment. Just an '
- 'identifier.'))
+ TextField('name',
+ description='The name of the experiment. Just an '
+ 'identifier.'))
self.AddField(
- TextField(
- 'board',
- description='The target board for running '
- 'experiments on, e.g. x86-alex.'))
+ TextField('board',
+ description='The target board for running '
+ 'experiments on, e.g. x86-alex.'))
self.AddField(
- BooleanField(
- 'skylab',
- description='Whether to run experiments via skylab.',
- default=False))
+ BooleanField('crosfleet',
+ description='Whether to run experiments via crosfleet.',
+ default=False))
self.AddField(
- ListField(
- 'remote',
- description='A comma-separated list of IPs of '
- 'chromeos devices to run experiments on.'))
+ ListField('remote',
+ description='A comma-separated list of IPs of '
+ 'chromeos devices to run experiments on.'))
self.AddField(
- BooleanField(
- 'rerun_if_failed',
- description='Whether to re-run failed test runs '
- 'or not.',
- default=False))
+ BooleanField('rerun_if_failed',
+ description='Whether to re-run failed test runs '
+ 'or not.',
+ default=False))
self.AddField(
- BooleanField(
- 'rm_chroot_tmp',
- default=False,
- description='Whether to remove the test_that '
- 'result in the chroot.'))
+ BooleanField('rm_chroot_tmp',
+ default=False,
+ description='Whether to remove the test_that '
+ 'result in the chroot.'))
self.AddField(
- ListField(
- 'email',
- description='Space-separated list of email '
- 'addresses to send email to.'))
+ ListField('email',
+ description='Space-separated list of email '
+ 'addresses to send email to.'))
self.AddField(
- BooleanField(
- 'rerun',
- description='Whether to ignore the cache and '
- 'for tests to be re-run.',
- default=False))
+ BooleanField('rerun',
+ description='Whether to ignore the cache and '
+ 'for tests to be re-run.',
+ default=False))
self.AddField(
- BooleanField(
- 'same_specs',
- default=True,
- description='Ensure cached runs are run on the '
- 'same kind of devices which are specified as a '
- 'remote.'))
+ BooleanField('same_specs',
+ default=True,
+ description='Ensure cached runs are run on the '
+ 'same kind of devices which are specified as a '
+ 'remote.'))
self.AddField(
- BooleanField(
- 'same_machine',
- default=False,
- description='Ensure cached runs are run on the '
- 'same remote.'))
+ BooleanField('same_machine',
+ default=False,
+ description='Ensure cached runs are run on the '
+ 'same remote.'))
self.AddField(
- BooleanField(
- 'use_file_locks',
- default=False,
- description='DEPRECATED: Whether to use the file locks '
- 'or AFE server lock mechanism.'))
+ BooleanField('use_file_locks',
+ default=False,
+ description='DEPRECATED: Whether to use the file locks '
+ 'or AFE server lock mechanism.'))
self.AddField(
IntegerField(
'iterations',
@@ -215,79 +192,68 @@
'If not set, will run each benchmark test the optimum number of '
'times to get a stable result.'))
self.AddField(
- TextField(
- 'chromeos_root',
- description='The path to a chromeos checkout which '
- 'contains a src/scripts directory. Defaults to '
- 'the chromeos checkout which contains the '
- 'chromeos_image.'))
+ TextField('chromeos_root',
+ description='The path to a chromeos checkout which '
+ 'contains a src/scripts directory. Defaults to '
+ 'the chromeos checkout which contains the '
+ 'chromeos_image.'))
self.AddField(
- TextField(
- 'logging_level',
- default='average',
- description='The level of logging desired. '
- "Options are 'quiet', 'average', and 'verbose'."))
+ TextField('logging_level',
+ default='average',
+ description='The level of logging desired. '
+ "Options are 'quiet', 'average', and 'verbose'."))
self.AddField(
- IntegerField(
- 'acquire_timeout',
- default=0,
- description='Number of seconds to wait for '
- 'machine before exit if all the machines in '
- 'the experiment file are busy. Default is 0.'))
+ IntegerField('acquire_timeout',
+ default=0,
+ description='Number of seconds to wait for '
+ 'machine before exit if all the machines in '
+ 'the experiment file are busy. Default is 0.'))
self.AddField(
- TextField(
- 'perf_args',
- default='',
- description='The optional profile command. It '
- 'enables perf commands to record perforamance '
- 'related counters. It must start with perf '
- 'command record or stat followed by arguments.'))
+ TextField('perf_args',
+ default='',
+ description='The optional profile command. It '
+                  'enables perf commands to record performance '
+ 'related counters. It must start with perf '
+ 'command record or stat followed by arguments.'))
self.AddField(
- BooleanField(
- 'download_debug',
- default=True,
- description='Download compressed debug symbols alongwith '
- 'image. This can provide more info matching symbols for'
- 'profiles, but takes larger space. By default, download'
- 'it only when perf_args is specified.'))
+ BooleanField('download_debug',
+ default=True,
+                     description='Download compressed debug symbols along with '
+                     'image. This can provide more info matching symbols for '
+                     'profiles, but takes larger space. By default, download '
+ 'it only when perf_args is specified.'))
self.AddField(
- TextField(
- 'cache_dir',
- default='',
- description='The abs path of cache dir. '
- 'Default is /home/$(whoami)/cros_scratch.'))
+ TextField('cache_dir',
+ default='',
+ description='The abs path of cache dir. '
+ 'Default is /home/$(whoami)/cros_scratch.'))
self.AddField(
- BooleanField(
- 'cache_only',
- default=False,
- description='Whether to use only cached '
- 'results (do not rerun failed tests).'))
+ BooleanField('cache_only',
+ default=False,
+ description='Whether to use only cached '
+ 'results (do not rerun failed tests).'))
self.AddField(
- BooleanField(
- 'no_email',
- default=False,
- description='Whether to disable the email to '
- 'user after crosperf finishes.'))
+ BooleanField('no_email',
+ default=False,
+ description='Whether to disable the email to '
+ 'user after crosperf finishes.'))
self.AddField(
- BooleanField(
- 'json_report',
- default=False,
- description='Whether to generate a json version '
- 'of the report, for archiving.'))
+ BooleanField('json_report',
+ default=False,
+ description='Whether to generate a json version '
+ 'of the report, for archiving.'))
self.AddField(
- BooleanField(
- 'show_all_results',
- default=False,
- description='When running Telemetry tests, '
- 'whether to all the results, instead of just '
- 'the default (summary) results.'))
+ BooleanField('show_all_results',
+ default=False,
+ description='When running Telemetry tests, '
+                     'whether to show all the results, instead of just '
+ 'the default (summary) results.'))
self.AddField(
- TextField(
- 'share_cache',
- default='',
- description='Path to alternate cache whose data '
- 'you want to use. It accepts multiple directories '
- 'separated by a ",".'))
+ TextField('share_cache',
+ default='',
+ description='Path to alternate cache whose data '
+ 'you want to use. It accepts multiple directories '
+ 'separated by a ",".'))
self.AddField(
TextField('results_dir', default='', description='The results dir.'))
self.AddField(
@@ -297,55 +263,49 @@
description='Whether to compress all test results other than '
'reports into a tarball to save disk space.'))
self.AddField(
- TextField(
- 'locks_dir',
- default='',
- description='An alternate directory to use for '
- 'storing/checking machine file locks for local machines. '
- 'By default the file locks directory is '
- '/google/data/rw/users/mo/mobiletc-prebuild/locks.\n'
- 'WARNING: If you use your own locks directory, '
- 'there is no guarantee that someone else might not '
- 'hold a lock on the same machine in a different '
- 'locks directory.'))
+ TextField('locks_dir',
+ default='',
+ description='An alternate directory to use for '
+ 'storing/checking machine file locks for local machines. '
+ 'By default the file locks directory is '
+ '/google/data/rw/users/mo/mobiletc-prebuild/locks.\n'
+ 'WARNING: If you use your own locks directory, '
+ 'there is no guarantee that someone else might not '
+ 'hold a lock on the same machine in a different '
+ 'locks directory.'))
self.AddField(
- TextField(
- 'chrome_src',
- description='The path to the source of chrome. '
- 'This is used to run telemetry benchmarks. '
- 'The default one is the src inside chroot.',
- required=False,
- default=''))
+ TextField('chrome_src',
+ description='The path to the source of chrome. '
+ 'This is used to run telemetry benchmarks. '
+ 'The default one is the src inside chroot.',
+ required=False,
+ default=''))
self.AddField(
- IntegerField(
- 'retries',
- default=0,
- description='Number of times to retry a '
- 'benchmark run.'))
+ IntegerField('retries',
+ default=0,
+ description='Number of times to retry a '
+ 'benchmark run.'))
self.AddField(
- TextField(
- 'cwp_dso',
- description='The DSO type that we want to use for '
- 'CWP approximation. This is used to run telemetry '
- 'benchmarks. Valid DSO types can be found from dso_list '
- 'in experiment_factory.py. The default value is set to '
- 'be empty.',
- required=False,
- default=''))
+ TextField('cwp_dso',
+ description='The DSO type that we want to use for '
+ 'CWP approximation. This is used to run telemetry '
+ 'benchmarks. Valid DSO types can be found from dso_list '
+ 'in experiment_factory.py. The default value is set to '
+ 'be empty.',
+ required=False,
+ default=''))
self.AddField(
- BooleanField(
- 'enable_aslr',
- description='Enable ASLR on the machine to run the '
- 'benchmarks. ASLR is disabled by default',
- required=False,
- default=False))
+ BooleanField('enable_aslr',
+ description='Enable ASLR on the machine to run the '
+ 'benchmarks. ASLR is disabled by default',
+ required=False,
+ default=False))
self.AddField(
- BooleanField(
- 'ignore_min_max',
- description='When doing math for the raw results, '
- 'ignore min and max values to reduce noise.',
- required=False,
- default=False))
+ BooleanField('ignore_min_max',
+ description='When doing math for the raw results, '
+ 'ignore min and max values to reduce noise.',
+ required=False,
+ default=False))
self.AddField(
TextField(
'intel_pstate',
@@ -356,12 +316,11 @@
required=False,
default='no_hwp'))
self.AddField(
- BooleanField(
- 'turbostat',
- description='Run turbostat process in the background'
- ' of a benchmark. Enabled by default.',
- required=False,
- default=True))
+ BooleanField('turbostat',
+ description='Run turbostat process in the background'
+ ' of a benchmark. Enabled by default.',
+ required=False,
+ default=True))
self.AddField(
FloatField(
'top_interval',
@@ -377,22 +336,20 @@
required=False,
default=1))
self.AddField(
- IntegerField(
- 'cooldown_temp',
- required=False,
- default=40,
- description='Wait until CPU temperature goes down below'
- ' specified temperature in Celsius'
- ' prior starting a benchmark. '
- 'By default the value is set to 40 degrees.'))
+ IntegerField('cooldown_temp',
+ required=False,
+ default=40,
+ description='Wait until CPU temperature goes down below'
+                     ' the specified temperature in Celsius'
+                     ' prior to starting a benchmark. '
+ 'By default the value is set to 40 degrees.'))
self.AddField(
- IntegerField(
- 'cooldown_time',
- required=False,
- default=10,
- description='Wait specified time in minutes allowing'
- ' CPU to cool down. Zero value disables cooldown. '
- 'The default value is 10 minutes.'))
+ IntegerField('cooldown_time',
+ required=False,
+ default=10,
+ description='Wait specified time in minutes allowing'
+ ' CPU to cool down. Zero value disables cooldown. '
+ 'The default value is 10 minutes.'))
self.AddField(
EnumField(
'governor',
@@ -439,6 +396,12 @@
' or equal to a percent of max_freq. '
'CPU frequency is reduced to 95%% by default to reduce thermal '
'throttling.'))
+ self.AddField(
+ BooleanField(
+ 'no_lock',
+ default=False,
+ description='Do not attempt to lock the DUT.'
+            ' Useful when the lock is held externally, say with crosfleet.'))
class SettingsFactory(object):
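A minimal sketch (not part of the change) of how the renamed `crosfleet` field and the new `no_lock` field read back through the settings API, assuming it is run from the crosperf directory; the defaults and field count match the unittest expectations further down.

```python
import settings_factory

# Built the same way the unittest below builds it.
g = settings_factory.SettingsFactory().GetSettings('global', 'global')

# Both fields default to False until an experiment file overrides them.
print(g.GetField('crosfleet'))  # False
print(g.GetField('no_lock'))    # False
print(len(g.fields))            # 40 after this change
```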
diff --git a/crosperf/settings_factory_unittest.py b/crosperf/settings_factory_unittest.py
index bc10711..8277e87 100755
--- a/crosperf/settings_factory_unittest.py
+++ b/crosperf/settings_factory_unittest.py
@@ -50,10 +50,10 @@
def test_init(self):
res = settings_factory.GlobalSettings('g_settings')
self.assertIsNotNone(res)
- self.assertEqual(len(res.fields), 39)
+ self.assertEqual(len(res.fields), 40)
self.assertEqual(res.GetField('name'), '')
self.assertEqual(res.GetField('board'), '')
- self.assertEqual(res.GetField('skylab'), False)
+ self.assertEqual(res.GetField('crosfleet'), False)
self.assertEqual(res.GetField('remote'), None)
self.assertEqual(res.GetField('rerun_if_failed'), False)
self.assertEqual(res.GetField('rm_chroot_tmp'), False)
@@ -108,7 +108,7 @@
g_settings = settings_factory.SettingsFactory().GetSettings(
'global', 'global')
self.assertIsInstance(g_settings, settings_factory.GlobalSettings)
- self.assertEqual(len(g_settings.fields), 39)
+ self.assertEqual(len(g_settings.fields), 40)
if __name__ == '__main__':
diff --git a/crosperf/suite_runner.py b/crosperf/suite_runner.py
index 17e1ad7..6bd4ff3 100644
--- a/crosperf/suite_runner.py
+++ b/crosperf/suite_runner.py
@@ -18,7 +18,7 @@
TEST_THAT_PATH = '/usr/bin/test_that'
TAST_PATH = '/usr/bin/tast'
-SKYLAB_PATH = '/usr/local/bin/skylab'
+CROSFLEET_PATH = 'crosfleet'
GS_UTIL = 'src/chromium/depot_tools/gsutil.py'
AUTOTEST_DIR = '/mnt/host/source/src/third_party/autotest/files'
CHROME_MOUNT_DIR = '/tmp/chrome_root'
@@ -75,8 +75,8 @@
def Run(self, cros_machine, label, benchmark, test_args, profiler_args):
machine_name = cros_machine.name
for i in range(0, benchmark.retries + 1):
- if label.skylab:
- ret_tup = self.Skylab_Run(label, benchmark, test_args, profiler_args)
+ if label.crosfleet:
+ ret_tup = self.Crosfleet_Run(label, benchmark, test_args, profiler_args)
else:
if benchmark.suite == 'tast':
ret_tup = self.Tast_Run(machine_name, label, benchmark)
@@ -87,12 +87,12 @@
self.logger.LogOutput('benchmark %s failed. Retries left: %s' %
(benchmark.name, benchmark.retries - i))
elif i > 0:
- self.logger.LogOutput(
- 'benchmark %s succeded after %s retries' % (benchmark.name, i))
+        self.logger.LogOutput('benchmark %s succeeded after %s retries' %
+ (benchmark.name, i))
break
else:
- self.logger.LogOutput(
- 'benchmark %s succeded on first try' % benchmark.name)
+        self.logger.LogOutput('benchmark %s succeeded on first try' %
+ benchmark.name)
break
return ret_tup
@@ -238,8 +238,8 @@
self.logger.LogOutput('Result downloaded for task %s' % task_id)
return status
- def Skylab_Run(self, label, benchmark, test_args, profiler_args):
- """Run the test via skylab.."""
+ def Crosfleet_Run(self, label, benchmark, test_args, profiler_args):
+    """Run the test via crosfleet."""
options = []
if label.board:
options.append('-board=%s' % label.board)
@@ -257,19 +257,19 @@
dimensions.append('-dim dut_name:%s' % dut.rstrip('.cros'))
command = (('%s create-test %s %s %s') % \
- (SKYLAB_PATH, ' '.join(dimensions), ' '.join(options),
+ (CROSFLEET_PATH, ' '.join(dimensions), ' '.join(options),
benchmark.suite if
(benchmark.suite == 'telemetry_Crosperf' or
benchmark.suite == 'crosperf_Wrapper')
else benchmark.test_name))
if self.log_level != 'verbose':
- self.logger.LogOutput('Starting skylab test.')
+ self.logger.LogOutput('Starting crosfleet test.')
self.logger.LogOutput('CMD: %s' % command)
ret_tup = self._ce.RunCommandWOutput(command, command_terminator=self._ct)
if ret_tup[0] != 0:
- self.logger.LogOutput('Skylab test not created successfully.')
+ self.logger.LogOutput('Crosfleet test not created successfully.')
return ret_tup
# Std output of the command will look like:
@@ -278,9 +278,9 @@
# number in the very end of the link address.
task_id = ret_tup[1].strip().split('b')[-1]
- command = ('skylab wait-task %s' % task_id)
+ command = ('crosfleet wait-task %s' % task_id)
if self.log_level != 'verbose':
- self.logger.LogOutput('Waiting for skylab test to finish.')
+ self.logger.LogOutput('Waiting for crosfleet test to finish.')
self.logger.LogOutput('CMD: %s' % command)
ret_tup = self._ce.RunCommandWOutput(command, command_terminator=self._ct)
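A minimal sketch (not part of the change) of the task-id parsing in `Crosfleet_Run` above. The sample output string is hypothetical; the code only assumes the tool's output ends with a link whose last component is `b<digits>`, which is what `split('b')[-1]` extracts.

```python
# Hypothetical 'crosfleet create-test' output; only the trailing b<digits>
# of the link matters to the parser.
sample_output = ('Created request at '
                 'https://ci.chromium.org/p/chromeos/builders/test/b12345\n')

task_id = sample_output.strip().split('b')[-1]
print(task_id)                             # 12345
print('crosfleet wait-task %s' % task_id)  # the follow-up command
```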
diff --git a/crosperf/suite_runner_unittest.py b/crosperf/suite_runner_unittest.py
index 86e1ef1..c1eacb3 100755
--- a/crosperf/suite_runner_unittest.py
+++ b/crosperf/suite_runner_unittest.py
@@ -64,16 +64,17 @@
def __init__(self, *args, **kwargs):
super(SuiteRunnerTest, self).__init__(*args, **kwargs)
- self.skylab_run_args = []
+ self.crosfleet_run_args = []
self.test_that_args = []
self.tast_args = []
- self.call_skylab_run = False
+ self.call_crosfleet_run = False
self.call_test_that_run = False
self.call_tast_run = False
def setUp(self):
- self.runner = suite_runner.SuiteRunner(
- {}, self.mock_logger, 'verbose', self.mock_cmd_exec, self.mock_cmd_term)
+ self.runner = suite_runner.SuiteRunner({}, self.mock_logger, 'verbose',
+ self.mock_cmd_exec,
+ self.mock_cmd_term)
def test_get_profiler_args(self):
input_str = ("--profiler=custom_perf --profiler_args='perf_options"
@@ -98,16 +99,18 @@
def reset():
self.test_that_args = []
- self.skylab_run_args = []
+ self.crosfleet_run_args = []
self.tast_args = []
self.call_test_that_run = False
- self.call_skylab_run = False
+ self.call_crosfleet_run = False
self.call_tast_run = False
- def FakeSkylabRun(test_label, benchmark, test_args, profiler_args):
- self.skylab_run_args = [test_label, benchmark, test_args, profiler_args]
- self.call_skylab_run = True
- return 'Ran FakeSkylabRun'
+ def FakeCrosfleetRun(test_label, benchmark, test_args, profiler_args):
+ self.crosfleet_run_args = [
+ test_label, benchmark, test_args, profiler_args
+ ]
+ self.call_crosfleet_run = True
+ return 'Ran FakeCrosfleetRun'
def FakeTestThatRun(machine, test_label, benchmark, test_args,
profiler_args):
@@ -122,7 +125,7 @@
self.call_tast_run = True
return 'Ran FakeTastRun'
- self.runner.Skylab_Run = FakeSkylabRun
+ self.runner.Crosfleet_Run = FakeCrosfleetRun
self.runner.Test_That_Run = FakeTestThatRun
self.runner.Tast_Run = FakeTastRun
@@ -137,31 +140,31 @@
test_args = ''
profiler_args = ''
- # Test skylab run for telemetry_Crosperf and crosperf_Wrapper benchmarks.
- self.mock_label.skylab = True
+ # Test crosfleet run for telemetry_Crosperf and crosperf_Wrapper benchmarks.
+ self.mock_label.crosfleet = True
reset()
self.runner.Run(cros_machine, self.mock_label, self.crosperf_wrapper_bench,
test_args, profiler_args)
- self.assertTrue(self.call_skylab_run)
+ self.assertTrue(self.call_crosfleet_run)
self.assertFalse(self.call_test_that_run)
- self.assertEqual(self.skylab_run_args,
+ self.assertEqual(self.crosfleet_run_args,
[self.mock_label, self.crosperf_wrapper_bench, '', ''])
reset()
self.runner.Run(cros_machine, self.mock_label,
self.telemetry_crosperf_bench, test_args, profiler_args)
- self.assertTrue(self.call_skylab_run)
+ self.assertTrue(self.call_crosfleet_run)
self.assertFalse(self.call_test_that_run)
- self.assertEqual(self.skylab_run_args,
+ self.assertEqual(self.crosfleet_run_args,
[self.mock_label, self.telemetry_crosperf_bench, '', ''])
# Test test_that run for telemetry_Crosperf and crosperf_Wrapper benchmarks.
- self.mock_label.skylab = False
+ self.mock_label.crosfleet = False
reset()
self.runner.Run(cros_machine, self.mock_label, self.crosperf_wrapper_bench,
test_args, profiler_args)
self.assertTrue(self.call_test_that_run)
- self.assertFalse(self.call_skylab_run)
+ self.assertFalse(self.call_crosfleet_run)
self.assertEqual(
self.test_that_args,
['fake_machine', self.mock_label, self.crosperf_wrapper_bench, '', ''])
@@ -170,7 +173,7 @@
self.runner.Run(cros_machine, self.mock_label,
self.telemetry_crosperf_bench, test_args, profiler_args)
self.assertTrue(self.call_test_that_run)
- self.assertFalse(self.call_skylab_run)
+ self.assertFalse(self.call_crosfleet_run)
self.assertEqual(self.test_that_args, [
'fake_machine', self.mock_label, self.telemetry_crosperf_bench, '', ''
])
@@ -180,7 +183,7 @@
self.runner.Run(cros_machine, self.mock_label, self.tast_bench, '', '')
self.assertTrue(self.call_tast_run)
self.assertFalse(self.call_test_that_run)
- self.assertFalse(self.call_skylab_run)
+ self.assertFalse(self.call_crosfleet_run)
self.assertEqual(self.tast_args,
['fake_machine', self.mock_label, self.tast_bench])
@@ -257,7 +260,7 @@
@mock.patch.object(command_executer.CommandExecuter, 'RunCommandWOutput')
@mock.patch.object(json, 'loads')
- def test_skylab_run_client(self, mock_json_loads, mock_runcmd):
+ def test_crosfleet_run_client(self, mock_json_loads, mock_runcmd):
def FakeDownloadResult(l, task_id):
if l and task_id:
@@ -279,10 +282,10 @@
}
self.mock_json.loads = mock_json_loads
- self.mock_label.skylab = True
+ self.mock_label.crosfleet = True
self.runner.DownloadResult = FakeDownloadResult
- res = self.runner.Skylab_Run(self.mock_label, self.crosperf_wrapper_bench,
- '', '')
+ res = self.runner.Crosfleet_Run(self.mock_label,
+ self.crosperf_wrapper_bench, '', '')
ret_tup = (0, '\nResults placed in tmp/swarming-12345\n', '')
self.assertEqual(res, ret_tup)
self.assertEqual(mock_runcmd.call_count, 2)
@@ -293,7 +296,7 @@
self.assertEqual(args_dict['command_terminator'], self.mock_cmd_term)
args_list = mock_runcmd.call_args_list[1][0]
- self.assertEqual(args_list[0], ('skylab wait-task 12345'))
+ self.assertEqual(args_list[0], ('crosfleet wait-task 12345'))
self.assertEqual(args_dict['command_terminator'], self.mock_cmd_term)
diff --git a/crosperf/test_cache/compare_output/results.txt b/crosperf/test_cache/compare_output/results.pickle
similarity index 86%
rename from crosperf/test_cache/compare_output/results.txt
rename to crosperf/test_cache/compare_output/results.pickle
index 592e716..587863c 100644
--- a/crosperf/test_cache/compare_output/results.txt
+++ b/crosperf/test_cache/compare_output/results.pickle
Binary files differ
diff --git a/crosperf/test_cache/test_input/results.txt b/crosperf/test_cache/test_input/results.pickle
similarity index 100%
rename from crosperf/test_cache/test_input/results.txt
rename to crosperf/test_cache/test_input/results.pickle
diff --git a/crosperf/test_cache/test_puretelemetry_input/results.txt b/crosperf/test_cache/test_puretelemetry_input/results.pickle
similarity index 100%
rename from crosperf/test_cache/test_puretelemetry_input/results.txt
rename to crosperf/test_cache/test_puretelemetry_input/results.pickle
diff --git a/cwp/cr-os/fetch_gn_descs.py b/cwp/cr-os/fetch_gn_descs.py
index 60d331c..8a0b2e4 100755
--- a/cwp/cr-os/fetch_gn_descs.py
+++ b/cwp/cr-os/fetch_gn_descs.py
@@ -23,7 +23,6 @@
import argparse
import json
-# pylint: disable=cros-logging-import
import logging
import os
import subprocess
diff --git a/debug_info_test/check_cus.py b/debug_info_test/check_cus.py
index d3cd636..4112325 100644
--- a/debug_info_test/check_cus.py
+++ b/debug_info_test/check_cus.py
@@ -49,7 +49,7 @@
comp_path = ''
readelf = subprocess.Popen(
- ['readelf', '--debug-dump=info', '--dwarf-depth=1', dso_path],
+ ['llvm-dwarfdump', '--recurse-depth=0', dso_path],
stdout=subprocess.PIPE,
stderr=open(os.devnull, 'w'),
encoding='utf-8')
diff --git a/debug_info_test/check_exist.py b/debug_info_test/check_exist.py
index f2cc7c6..898dae4 100644
--- a/debug_info_test/check_exist.py
+++ b/debug_info_test/check_exist.py
@@ -85,7 +85,7 @@
"""
readelf = subprocess.Popen(
- ['readelf', '--debug-dump=info', '--dwarf-depth=1', dso_path],
+ ['llvm-dwarfdump', '--recurse-depth=0', dso_path],
stdout=subprocess.PIPE,
stderr=open(os.devnull, 'w'),
encoding='utf-8')
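A minimal sketch (not part of the change) of the new invocation on its own: `llvm-dwarfdump --recurse-depth=0` emits only the top-level `DW_TAG_compile_unit` entries, which is the same information the old `readelf --debug-dump=info --dwarf-depth=1` call fed to these checks. `list_compile_units` is a made-up helper name.

```python
import subprocess


def list_compile_units(dso_path):
  """Returns the DW_TAG_compile_unit lines reported for a DSO."""
  proc = subprocess.run(['llvm-dwarfdump', '--recurse-depth=0', dso_path],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.DEVNULL,
                        encoding='utf-8',
                        check=False)
  return [line for line in proc.stdout.splitlines()
          if 'DW_TAG_compile_unit' in line]
```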
diff --git a/llvm_extra/create_llvm_extra.sh b/llvm_extra/create_llvm_extra.sh
index 6f34a0b..b58e050 100755
--- a/llvm_extra/create_llvm_extra.sh
+++ b/llvm_extra/create_llvm_extra.sh
@@ -82,7 +82,7 @@
set -e
-# Sanity checks.
+# Confidence checks.
check_cmd "${@}"
# Create llvm-extra ebuild.
create_llvm_extra_ebuild "${@}"
diff --git a/llvm_tools/README.md b/llvm_tools/README.md
index 783ec22..74fad6c 100644
--- a/llvm_tools/README.md
+++ b/llvm_tools/README.md
@@ -119,6 +119,7 @@
$ ./update_chromeos_llvm_hash.py \
--update_packages sys-devel/llvm sys-libs/compiler-rt \
sys-libs/libcxx sys-libs/libcxxabi sys-libs/llvm-libunwind \
+ 'dev-util/lldb-server' \
--llvm_version 367622 \
--failure_mode disable_patches
```
@@ -360,30 +361,6 @@
--custom_script /abs/path/to/script.py
```
-### `update_all_tryjobs_with_auto.py`
-
-#### Usage
-
-This script updates all tryjobs that are 'pending' to the result provided by
-`cros buildresult`.
-
-For example:
-
-```
-$ ./update_all_tryjobs_with_auto.py \
- --last_tested /abs/path/to/last_tested_file.json \
- --chroot_path /abs/path/to/chroot
-```
-
-The above example will update all tryjobs whose 'status' is 'pending' in the
-file provided by `--last_tested`.
-
-For help with the command line arguments of the script, run:
-
-```
-$ ./update_all_tryjobs_with_auto.py --help
-```
-
### `modify_a_tryjob.py`
#### Usage
@@ -496,18 +473,20 @@
**Tip**: if you put a symlink called `git-llvm-rev` to this script somewhere on
your `$PATH`, you can also use it as `git llvm-rev`.
-### `cherrypick_cl.py`
+### `get_upstream_patch.py`
#### Usage
-This script updates the proper ChromeOS packages with an LLVM cherrypick of your choosing, and
-copies the cherrypick into patch folders of the packages.
+This script updates the proper ChromeOS packages with LLVM patches of your choosing, and
+copies the patches into patch folders of the packages. This tool supports both git hashes
+of commits and differential reviews.
Usage:
```
-./cherrypick_cl.py --chroot_path /abs/path/to/chroot --start_sha llvm
+./get_upstream_patch.py --chroot_path /abs/path/to/chroot --start_sha llvm
--sha 174c3eb69f19ff2d6a3eeae31d04afe77e62c021 --sha 174c3eb69f19ff2d6a3eeae31d04afe77e62c021
+--differential D123456
```
It tries to autodetect a lot of things (e.g., packages changed by each sha,
@@ -517,6 +496,10 @@
### `revert_checker.py`
+**This script is copied from upstream LLVM. Please prefer making edits upstream
+rather than modifying this copy. It is kept in a CrOS repo so that scripts here
+can `import` it without needing an LLVM tree.**
+
This script reports reverts which happen 'across' a certain LLVM commit.
To clarify the meaning of 'across' with an example, if we had the following
@@ -542,11 +525,23 @@
This is an automated wrapper around `revert_checker.py`. It checks to see if any
new reverts happened across toolchains that we're trying to ship since it was
-last run. If so, it sends emails to appropriate groups.
+last run. If so, it either automatically cherry-picks the reverts, or sends
+emails to appropriate groups.
-Usage example:
+Usage example for cherry-picking:
```
PYTHONPATH=../ ./nightly_revert_checker.py \
+    cherry-pick \
+ --state_file state.json \
+ --llvm_dir llvm-project-copy \
+ --chromeos_dir ../../../../
+ [email protected]
+```
+
+Usage example for email:
+```
+PYTHONPATH=../ ./nightly_revert_checker.py \
+    email \
--state_file state.json \
--llvm_dir llvm-project-copy \
--chromeos_dir ../../../../
diff --git a/llvm_tools/auto_llvm_bisection.py b/llvm_tools/auto_llvm_bisection.py
index dd29cf4..7e8fb1d 100755
--- a/llvm_tools/auto_llvm_bisection.py
+++ b/llvm_tools/auto_llvm_bisection.py
@@ -8,6 +8,8 @@
from __future__ import print_function
+import enum
+import json
import os
import subprocess
import sys
@@ -17,7 +19,7 @@
import chroot
from llvm_bisection import BisectionExitStatus
import llvm_bisection
-from update_all_tryjobs_with_auto import GetPathToUpdateAllTryjobsWithAutoScript
+import update_tryjob_status
# Used to re-try for 'llvm_bisection.py' to attempt to launch more tryjobs.
BISECTION_RETRY_TIME_SECS = 10 * 60
@@ -39,6 +41,52 @@
POLLING_LIMIT_SECS = 18 * 60 * 60
+class BuilderStatus(enum.Enum):
+ """Actual values given via 'cros buildresult'."""
+
+ PASS = 'pass'
+ FAIL = 'fail'
+ RUNNING = 'running'
+
+
+builder_status_mapping = {
+ BuilderStatus.PASS.value: update_tryjob_status.TryjobStatus.GOOD.value,
+ BuilderStatus.FAIL.value: update_tryjob_status.TryjobStatus.BAD.value,
+ BuilderStatus.RUNNING.value: update_tryjob_status.TryjobStatus.PENDING.value
+}
+
+
+def GetBuildResult(chroot_path, buildbucket_id):
+  """Returns the 'cros buildresult' status mapped to a tryjob status."""
+
+ # Calls 'cros buildresult' to get the status of the tryjob.
+ try:
+ tryjob_json = subprocess.check_output(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id',
+ str(buildbucket_id), '--report', 'json'
+ ],
+ cwd=chroot_path,
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
+ except subprocess.CalledProcessError as err:
+ if 'No build found. Perhaps not started' not in err.output:
+ raise
+ return None
+
+ tryjob_content = json.loads(tryjob_json)
+
+ build_result = str(tryjob_content['%d' % buildbucket_id]['status'])
+
+ # The string returned by 'cros buildresult' might not be in the mapping.
+ if build_result not in builder_status_mapping:
+ raise ValueError('"cros buildresult" return value is invalid: %s' %
+ build_result)
+
+ return builder_status_mapping[build_result]
+
+
def main():
"""Bisects LLVM using the result of `cros buildresult` of each tryjob.
@@ -50,51 +98,58 @@
args_output = llvm_bisection.GetCommandLineArgs()
- exec_update_tryjobs = [
- GetPathToUpdateAllTryjobsWithAutoScript(), '--chroot_path',
- args_output.chroot_path, '--last_tested', args_output.last_tested
- ]
-
if os.path.isfile(args_output.last_tested):
print('Resuming bisection for %s' % args_output.last_tested)
else:
print('Starting a new bisection for %s' % args_output.last_tested)
while True:
+ # Update the status of existing tryjobs
if os.path.isfile(args_output.last_tested):
update_start_time = time.time()
-
- # Update all tryjobs whose status is 'pending' to the result of `cros
- # buildresult`.
+ with open(args_output.last_tested) as json_file:
+ json_dict = json.load(json_file)
while True:
print('\nAttempting to update all tryjobs whose "status" is '
'"pending":')
print('-' * 40)
- update_ret = subprocess.call(exec_update_tryjobs)
+ completed = True
+ for tryjob in json_dict['jobs']:
+ if tryjob[
+ 'status'] == update_tryjob_status.TryjobStatus.PENDING.value:
+ status = GetBuildResult(args_output.chroot_path,
+ tryjob['buildbucket_id'])
+ if status:
+ tryjob['status'] = status
+ else:
+ completed = False
print('-' * 40)
- # Successfully updated all tryjobs whose 'status' was 'pending'/ no
- # updates were needed (all tryjobs already have been updated).
- if update_ret == 0:
+ # Proceed to the next step if all the existing tryjobs have completed.
+ if completed:
break
delta_time = time.time() - update_start_time
if delta_time > POLLING_LIMIT_SECS:
- print('Unable to update tryjobs whose status is "pending" to '
- 'the result of `cros buildresult`.')
-
# Something is wrong with updating the tryjobs's 'status' via
# `cros buildresult` (e.g. network issue, etc.).
- sys.exit(1)
+ sys.exit('Failed to update pending tryjobs.')
+ print('-' * 40)
print('Sleeping for %d minutes.' % (POLL_RETRY_TIME_SECS // 60))
time.sleep(POLL_RETRY_TIME_SECS)
- # Launch more tryjobs if possible to narrow down the bad commit/revision or
- # terminate the bisection because the bad commit/revision was found.
+    # There should always be an update from the tryjobs launched in the
+ # last iteration.
+ temp_filename = '%s.new' % args_output.last_tested
+ with open(temp_filename, 'w') as temp_file:
+ json.dump(json_dict, temp_file, indent=4, separators=(',', ': '))
+ os.rename(temp_filename, args_output.last_tested)
+
+ # Launch more tryjobs.
for cur_try in range(1, BISECTION_ATTEMPTS + 1):
try:
print('\nAttempting to launch more tryjobs if possible:')
@@ -104,8 +159,7 @@
print('-' * 40)
- # Exit code 126 means that there are no more revisions to test between
- # 'start' and 'end', so bisection is complete.
+ # Stop if the bisection has completed.
if bisection_ret == BisectionExitStatus.BISECTION_COMPLETE.value:
sys.exit(0)
@@ -118,9 +172,7 @@
# Exceeded the number of times to launch more tryjobs.
if cur_try == BISECTION_ATTEMPTS:
- print('Unable to continue bisection.')
-
- sys.exit(1)
+ sys.exit('Unable to continue bisection.')
num_retries_left = BISECTION_ATTEMPTS - cur_try
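A minimal sketch (not part of the change) tying the new pieces of `auto_llvm_bisection.py` together: pending jobs in the last-tested JSON are resolved through `GetBuildResult`, and the file is rewritten via a temporary file plus `os.rename` so it stays valid if the script is interrupted. The dictionary shape mirrors the unittests below; the chroot and file paths are placeholders.

```python
import json
import os

import auto_llvm_bisection
import update_tryjob_status

json_dict = {
    'start': 369410,
    'end': 369420,
    'jobs': [{
        'buildbucket_id': 12345,
        'rev': 369411,
        'status': update_tryjob_status.TryjobStatus.PENDING.value,
    }],
}

completed = True
for tryjob in json_dict['jobs']:
  if tryjob['status'] == update_tryjob_status.TryjobStatus.PENDING.value:
    # 'good'/'bad'/'pending', or None if the tryjob has not started yet.
    status = auto_llvm_bisection.GetBuildResult('/path/to/chroot',
                                                tryjob['buildbucket_id'])
    if status:
      tryjob['status'] = status
    else:
      completed = False

if completed:
  temp_filename = 'last_tested.json.new'
  with open(temp_filename, 'w') as temp_file:
    json.dump(json_dict, temp_file, indent=4, separators=(',', ': '))
  os.rename(temp_filename, 'last_tested.json')
```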
diff --git a/llvm_tools/auto_llvm_bisection_unittest.py b/llvm_tools/auto_llvm_bisection_unittest.py
index 56b556e..07c0e71 100755
--- a/llvm_tools/auto_llvm_bisection_unittest.py
+++ b/llvm_tools/auto_llvm_bisection_unittest.py
@@ -8,6 +8,7 @@
from __future__ import print_function
+import json
import os
import subprocess
import time
@@ -19,140 +20,59 @@
import chroot
import llvm_bisection
import test_helpers
+import update_tryjob_status
class AutoLLVMBisectionTest(unittest.TestCase):
"""Unittests for auto bisection of LLVM."""
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
@mock.patch.object(chroot, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
- @mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when an exception happened in
- # `llvm_bisection.main()`.
- @mock.patch.object(traceback, 'print_exc')
- # Simulate behavior of `llvm_bisection.main()` when failed to launch tryjobs
- # (exception happened along the way, etc.).
- @mock.patch.object(llvm_bisection, 'main')
- # Simulate behavior of `os.path.isfile()` when starting a new bisection.
- @mock.patch.object(os.path, 'isfile', return_value=False)
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
@mock.patch.object(
llvm_bisection,
'GetCommandLineArgs',
return_value=test_helpers.ArgsOutputTest())
- def testFailedToStartBisection(
- self, mock_get_args, mock_get_auto_script, mock_is_file,
- mock_llvm_bisection, mock_traceback, mock_sleep, mock_outside_chroot):
-
- def MockLLVMBisectionRaisesException(_args_output):
- raise ValueError('Failed to launch more tryjobs.')
-
- # Use the test function to simulate the behavior of an exception happening
- # when launching more tryjobs.
- mock_llvm_bisection.side_effect = MockLLVMBisectionRaisesException
-
- # Verify the exception is raised when the number of attempts to launched
- # more tryjobs is exceeded, so unable to continue
- # bisection.
- with self.assertRaises(SystemExit) as err:
- auto_llvm_bisection.main()
-
- self.assertEqual(err.exception.code, 1)
-
- mock_outside_chroot.assert_called_once()
- mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 2)
- self.assertEqual(mock_llvm_bisection.call_count, 3)
- self.assertEqual(mock_traceback.call_count, 3)
- self.assertEqual(mock_sleep.call_count, 2)
-
- # Simulate the behavior of `subprocess.call()` when successfully updated all
- # tryjobs whose 'status' value is 'pending'.
- @mock.patch.object(subprocess, 'call', return_value=0)
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
- @mock.patch.object(chroot, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
@mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when an exception happened in
- # `llvm_bisection.main()`.
@mock.patch.object(traceback, 'print_exc')
- # Simulate behavior of `llvm_bisection.main()` when failed to launch tryjobs
- # (exception happened along the way, etc.).
@mock.patch.object(llvm_bisection, 'main')
- # Simulate behavior of `os.path.isfile()` when starting a new bisection.
@mock.patch.object(os.path, 'isfile')
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
- @mock.patch.object(
- llvm_bisection,
- 'GetCommandLineArgs',
- return_value=test_helpers.ArgsOutputTest())
- def testSuccessfullyBisectedLLVMRevision(
- self, mock_get_args, mock_get_auto_script, mock_is_file,
- mock_llvm_bisection, mock_traceback, mock_sleep, mock_outside_chroot,
- mock_update_tryjobs):
+ @mock.patch.object(auto_llvm_bisection, 'open')
+ @mock.patch.object(json, 'load')
+ @mock.patch.object(auto_llvm_bisection, 'GetBuildResult')
+ @mock.patch.object(os, 'rename')
+ def testAutoLLVMBisectionPassed(
+ self,
+ # pylint: disable=unused-argument
+ mock_rename,
+ mock_get_build_result,
+ mock_json_load,
+ # pylint: disable=unused-argument
+ mock_open,
+ mock_isfile,
+ mock_llvm_bisection,
+ mock_traceback,
+ mock_sleep,
+ mock_get_args,
+ mock_outside_chroot):
- # Simulate the behavior of `os.path.isfile()` when checking whether the
- # status file provided exists.
- @test_helpers.CallCountsToMockFunctions
- def MockStatusFileCheck(call_count, _last_tested):
- # Simulate that the status file does not exist, so the LLVM bisection
- # script would create the status file and launch tryjobs.
- if call_count < 2:
- return False
-
- # Simulate when the status file exists and `subprocess.call()` executes
- # the script that updates all the 'pending' tryjobs to the result of `cros
- # buildresult`.
- if call_count == 2:
- return True
-
- assert False, 'os.path.isfile() called more times than expected.'
-
- # Simulate behavior of `llvm_bisection.main()` when successfully bisected
- # between the good and bad LLVM revision.
- @test_helpers.CallCountsToMockFunctions
- def MockLLVMBisectionReturnValue(call_count, _args_output):
- # Simulate that successfully launched more tryjobs.
- if call_count == 0:
- return 0
-
- # Simulate that failed to launch more tryjobs.
- if call_count == 1:
- raise ValueError('Failed to launch more tryjobs.')
-
- # Simulate that the bad revision has been found.
- if call_count == 2:
- return llvm_bisection.BisectionExitStatus.BISECTION_COMPLETE.value
-
- assert False, 'Called `llvm_bisection.main()` more than expected.'
-
- # Use the test function to simulate the behavior of `llvm_bisection.main()`.
- mock_llvm_bisection.side_effect = MockLLVMBisectionReturnValue
-
- # Use the test function to simulate the behavior of `os.path.isfile()`.
- mock_is_file.side_effect = MockStatusFileCheck
+ mock_isfile.side_effect = [False, False, True, True]
+ mock_llvm_bisection.side_effect = [
+ 0,
+ ValueError('Failed to launch more tryjobs.'),
+ llvm_bisection.BisectionExitStatus.BISECTION_COMPLETE.value
+ ]
+ mock_json_load.return_value = {
+ 'start':
+ 369410,
+ 'end':
+ 369420,
+ 'jobs': [{
+ 'buildbucket_id': 12345,
+ 'rev': 369411,
+ 'status': update_tryjob_status.TryjobStatus.PENDING.value,
+ }]
+ }
+ mock_get_build_result.return_value = (
+ update_tryjob_status.TryjobStatus.GOOD.value)
     # Verify the exception is raised when successfully found the bad revision.
# Uses `sys.exit(0)` to indicate success.
@@ -163,43 +83,65 @@
mock_outside_chroot.assert_called_once()
mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 3)
+ self.assertEqual(mock_isfile.call_count, 3)
self.assertEqual(mock_llvm_bisection.call_count, 3)
mock_traceback.assert_called_once()
mock_sleep.assert_called_once()
- mock_update_tryjobs.assert_called_once()
- # Simulate behavior of `subprocess.call()` when failed to update tryjobs to
- # `cros buildresult` (script failed).
- @mock.patch.object(subprocess, 'call', return_value=1)
- # Simulate behavior of `time.time()` when determining the time passed when
- # updating tryjobs whose 'status' is 'pending'.
- @mock.patch.object(time, 'time')
- # Simulate the behavior of `VerifyOutsideChroot()` when successfully invoking
- # the script outside of the chroot.
@mock.patch.object(chroot, 'VerifyOutsideChroot', return_value=True)
- # Simulate behavior of `time.sleep()` when waiting for errors to settle caused
- # by `llvm_bisection.main()` (e.g. network issue, etc.).
@mock.patch.object(time, 'sleep')
- # Simulate behavior of `traceback.print_exc()` when resuming bisection.
- @mock.patch.object(os.path, 'isfile', return_value=True)
- # Simulate behavior of `GetPathToUpdateAllTryjobsWithAutoScript()` when
- # returning the absolute path to that script that updates all 'pending'
- # tryjobs to the result of `cros buildresult`.
- @mock.patch.object(
- auto_llvm_bisection,
- 'GetPathToUpdateAllTryjobsWithAutoScript',
- return_value='/abs/path/to/update_tryjob.py')
- # Simulate `llvm_bisection.GetCommandLineArgs()` when parsing the command line
- # arguments required by the bisection script.
+ @mock.patch.object(traceback, 'print_exc')
+ @mock.patch.object(llvm_bisection, 'main')
+ @mock.patch.object(os.path, 'isfile')
@mock.patch.object(
llvm_bisection,
'GetCommandLineArgs',
return_value=test_helpers.ArgsOutputTest())
+ def testFailedToStartBisection(self, mock_get_args, mock_isfile,
+ mock_llvm_bisection, mock_traceback,
+ mock_sleep, mock_outside_chroot):
+
+ mock_isfile.return_value = False
+ mock_llvm_bisection.side_effect = ValueError(
+ 'Failed to launch more tryjobs.')
+
+    # Verify the exception is raised when the number of attempts to launch
+    # more tryjobs is exceeded, so bisection cannot continue.
+ with self.assertRaises(SystemExit) as err:
+ auto_llvm_bisection.main()
+
+ self.assertEqual(err.exception.code, 'Unable to continue bisection.')
+
+ mock_outside_chroot.assert_called_once()
+ mock_get_args.assert_called_once()
+ self.assertEqual(mock_isfile.call_count, 2)
+ self.assertEqual(mock_llvm_bisection.call_count, 3)
+ self.assertEqual(mock_traceback.call_count, 3)
+ self.assertEqual(mock_sleep.call_count, 2)
+
+ @mock.patch.object(chroot, 'VerifyOutsideChroot', return_value=True)
+ @mock.patch.object(
+ llvm_bisection,
+ 'GetCommandLineArgs',
+ return_value=test_helpers.ArgsOutputTest())
+ @mock.patch.object(time, 'time')
+ @mock.patch.object(time, 'sleep')
+ @mock.patch.object(os.path, 'isfile')
+ @mock.patch.object(auto_llvm_bisection, 'open')
+ @mock.patch.object(json, 'load')
+ @mock.patch.object(auto_llvm_bisection, 'GetBuildResult')
def testFailedToUpdatePendingTryJobs(
- self, mock_get_args, mock_get_auto_script, mock_is_file, mock_sleep,
- mock_outside_chroot, mock_time, mock_update_tryjobs):
+ self,
+ mock_get_build_result,
+ mock_json_load,
+ # pylint: disable=unused-argument
+ mock_open,
+ mock_isfile,
+ mock_sleep,
+ mock_time,
+ mock_get_args,
+ mock_outside_chroot):
# Simulate behavior of `time.time()` for time passed.
@test_helpers.CallCountsToMockFunctions
@@ -209,9 +151,20 @@
assert False, 'Called `time.time()` more than expected.'
- # Use the test function to simulate the behavior of `time.time()`.
+ mock_isfile.return_value = True
+ mock_json_load.return_value = {
+ 'start':
+ 369410,
+ 'end':
+ 369420,
+ 'jobs': [{
+ 'buildbucket_id': 12345,
+ 'rev': 369411,
+ 'status': update_tryjob_status.TryjobStatus.PENDING.value,
+ }]
+ }
+ mock_get_build_result.return_value = None
mock_time.side_effect = MockTimePassed
-
# Reduce the polling limit for the test case to terminate faster.
auto_llvm_bisection.POLLING_LIMIT_SECS = 1
@@ -220,15 +173,80 @@
with self.assertRaises(SystemExit) as err:
auto_llvm_bisection.main()
- self.assertEqual(err.exception.code, 1)
+ self.assertEqual(err.exception.code, 'Failed to update pending tryjobs.')
mock_outside_chroot.assert_called_once()
mock_get_args.assert_called_once()
- mock_get_auto_script.assert_called_once()
- self.assertEqual(mock_is_file.call_count, 2)
+ self.assertEqual(mock_isfile.call_count, 2)
mock_sleep.assert_called_once()
self.assertEqual(mock_time.call_count, 3)
- self.assertEqual(mock_update_tryjobs.call_count, 2)
+
+ @mock.patch.object(subprocess, 'check_output')
+ def testGetBuildResult(self, mock_chroot_command):
+ buildbucket_id = 192
+ status = auto_llvm_bisection.BuilderStatus.PASS.value
+ tryjob_contents = {buildbucket_id: {'status': status}}
+ mock_chroot_command.return_value = json.dumps(tryjob_contents)
+ chroot_path = '/some/path/to/chroot'
+
+ self.assertEqual(
+ auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id),
+ update_tryjob_status.TryjobStatus.GOOD.value)
+
+ mock_chroot_command.assert_called_once_with(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id',
+ str(buildbucket_id), '--report', 'json'
+ ],
+ cwd='/some/path/to/chroot',
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
+
+ @mock.patch.object(subprocess, 'check_output')
+ def testGetBuildResultPassedWithUnstartedTryjob(self, mock_chroot_command):
+ buildbucket_id = 192
+ chroot_path = '/some/path/to/chroot'
+ mock_chroot_command.side_effect = subprocess.CalledProcessError(
+ returncode=1, cmd=[], output='No build found. Perhaps not started')
+ auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id)
+ mock_chroot_command.assert_called_once_with(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id', '192',
+ '--report', 'json'
+ ],
+ cwd=chroot_path,
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
+
+ @mock.patch.object(subprocess, 'check_output')
+  def testGetBuildResultFailedWithInvalidBuildStatus(self,
+ mock_chroot_command):
+ chroot_path = '/some/path/to/chroot'
+ buildbucket_id = 50
+ invalid_build_status = 'querying'
+ tryjob_contents = {buildbucket_id: {'status': invalid_build_status}}
+ mock_chroot_command.return_value = json.dumps(tryjob_contents)
+
+ # Verify the exception is raised when the return value of `cros buildresult`
+ # is not in the `builder_status_mapping`.
+ with self.assertRaises(ValueError) as err:
+ auto_llvm_bisection.GetBuildResult(chroot_path, buildbucket_id)
+
+ self.assertEqual(
+ str(err.exception),
+ '"cros buildresult" return value is invalid: %s' % invalid_build_status)
+
+ mock_chroot_command.assert_called_once_with(
+ [
+ 'cros_sdk', '--', 'cros', 'buildresult', '--buildbucket-id',
+ str(buildbucket_id), '--report', 'json'
+ ],
+ cwd=chroot_path,
+ stderr=subprocess.STDOUT,
+ encoding='UTF-8',
+ )
if __name__ == '__main__':
diff --git a/llvm_tools/bisect_clang_crashes.py b/llvm_tools/bisect_clang_crashes.py
index e8ee2ab..c53db17 100755
--- a/llvm_tools/bisect_clang_crashes.py
+++ b/llvm_tools/bisect_clang_crashes.py
@@ -7,8 +7,6 @@
"""Fetches and submits the artifacts from Chrome OS toolchain's crash bucket.
"""
-# pylint: disable=cros-logging-import
-
import argparse
import glob
import json
diff --git a/llvm_tools/bisect_clang_crashes_unittest.py b/llvm_tools/bisect_clang_crashes_unittest.py
index c914345..a3dc0c6 100755
--- a/llvm_tools/bisect_clang_crashes_unittest.py
+++ b/llvm_tools/bisect_clang_crashes_unittest.py
@@ -6,7 +6,6 @@
"""Tests for bisect_clang_crashes."""
-# pylint: disable=cros-logging-import
import glob
import logging
import os.path
diff --git a/llvm_tools/cherrypick_cl.py b/llvm_tools/cherrypick_cl.py
deleted file mode 100755
index 9e30672..0000000
--- a/llvm_tools/cherrypick_cl.py
+++ /dev/null
@@ -1,250 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-# pylint: disable=cros-logging-import
-
-"""Adds a cherrypick to LLVM's PATCHES.json."""
-
-from __future__ import print_function
-
-import argparse
-import json
-import logging
-import os
-import shlex
-import subprocess
-import sys
-
-import chroot
-import get_llvm_hash
-import git
-import git_llvm_rev
-import update_chromeos_llvm_hash
-
-
-def add_cherrypick(patches_json_path: str, patches_dir: str,
- relative_patches_dir: str, start_version: git_llvm_rev.Rev,
- llvm_dir: str, rev: git_llvm_rev.Rev, sha: str,
- package: str):
- with open(patches_json_path, encoding='utf-8') as f:
- patches_json = json.load(f)
-
- file_name = sha + '.patch'
- rel_patch_path = os.path.join(relative_patches_dir, file_name)
-
- for p in patches_json:
- rel_path = p['rel_patch_path']
- if rel_path == rel_patch_path:
- raise ValueError('Patch at %r already exists in PATCHES.json' % rel_path)
- if sha in rel_path:
- logging.warning(
- 'Similarly-named patch already exists in PATCHES.json: %r', rel_path)
-
- with open(os.path.join(patches_dir, file_name), 'wb') as f:
- cmd = ['git', 'show', sha]
- # Only apply the part of the patch that belongs to this package, expect
- # LLVM. This is because some packages are built with LLVM ebuild on X86 but
- # not on the other architectures. e.g. compiler-rt. Therefore always apply
- # the entire patch to LLVM ebuild as a workaround.
- if package != 'llvm':
- cmd.append(package_to_project(package))
- subprocess.check_call(cmd, stdout=f, cwd=llvm_dir)
-
- commit_subject = subprocess.check_output(
- ['git', 'log', '-n1', '--format=%s', sha], cwd=llvm_dir, encoding='utf-8')
-
- patches_json.append({
- 'comment': commit_subject.strip(),
- 'rel_patch_path': rel_patch_path,
- 'start_version': start_version.number,
- 'end_version': rev.number,
- })
-
- temp_file = patches_json_path + '.tmp'
- with open(temp_file, 'w', encoding='utf-8') as f:
- json.dump(patches_json, f, indent=4, separators=(',', ': '))
- os.rename(temp_file, patches_json_path)
-
-
-def parse_ebuild_for_assignment(ebuild_path: str, var_name: str) -> str:
- # '_pre' filters the LLVM 9.0 ebuild, which we never want to target, from
- # this list.
- candidates = [
- x for x in os.listdir(ebuild_path)
- if x.endswith('.ebuild') and '_pre' in x
- ]
-
- if not candidates:
- raise ValueError('No ebuilds found under %r' % ebuild_path)
-
- ebuild = os.path.join(ebuild_path, max(candidates))
- with open(ebuild, encoding='utf-8') as f:
- var_name_eq = var_name + '='
- for orig_line in f:
- if not orig_line.startswith(var_name_eq):
- continue
-
- # We shouldn't see much variety here, so do the simplest thing possible.
- line = orig_line[len(var_name_eq):]
- # Remove comments
- line = line.split('#')[0]
- # Remove quotes
- line = shlex.split(line)
- if len(line) != 1:
- raise ValueError('Expected exactly one quoted value in %r' % orig_line)
- return line[0].strip()
-
- raise ValueError('No %s= line found in %r' % (var_name, ebuild))
-
-
-# Resolves a git ref (or similar) to a LLVM SHA.
-def resolve_llvm_ref(llvm_dir: str, sha: str) -> str:
- return subprocess.check_output(
- ['git', 'rev-parse', sha],
- encoding='utf-8',
- cwd=llvm_dir,
- ).strip()
-
-
-# Get the package name of an LLVM project
-def project_to_package(project: str) -> str:
- if project == 'libunwind':
- return 'llvm-libunwind'
- return project
-
-
-# Get the LLVM project name of a package
-def package_to_project(package: str) -> str:
- if package == 'llvm-libunwind':
- return 'libunwind'
- return package
-
-
-# Get the LLVM projects change in the specifed sha
-def get_package_names(sha: str, llvm_dir: str) -> list:
- paths = subprocess.check_output(
- ['git', 'show', '--name-only', '--format=', sha],
- cwd=llvm_dir,
- encoding='utf-8').splitlines()
- # Some LLVM projects are built by LLVM ebuild on X86, so always apply the
- # patch to LLVM ebuild
- packages = {'llvm'}
- # Detect if there are more packages to apply the patch to
- for path in paths:
- package = project_to_package(path.split('/')[0])
- if package in ('compiler-rt', 'libcxx', 'libcxxabi', 'llvm-libunwind'):
- packages.add(package)
- packages = list(sorted(packages))
- return packages
-
-
-def main():
- chroot.VerifyOutsideChroot()
- logging.basicConfig(
- format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
- level=logging.INFO,
- )
-
- parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument(
- '--chroot_path',
- default=os.path.join(os.path.expanduser('~'), 'chromiumos'),
- help='the path to the chroot (default: %(default)s)')
- parser.add_argument(
- '--start_sha',
- default='llvm-next',
- help='LLVM SHA that the patch should start applying at. You can specify '
- '"llvm" or "llvm-next", as well. Defaults to %(default)s.')
- parser.add_argument(
- '--sha',
- required=True,
- action='append',
- help='The LLVM git SHA to cherry-pick.')
- parser.add_argument(
- '--create_cl',
- default=False,
- action='store_true',
- help='Automatically create a CL if specified')
- args = parser.parse_args()
-
- llvm_symlink = chroot.ConvertChrootPathsToAbsolutePaths(
- args.chroot_path,
- chroot.GetChrootEbuildPaths(args.chroot_path, ['sys-devel/llvm']))[0]
- llvm_symlink_dir = os.path.dirname(llvm_symlink)
-
- git_status = subprocess.check_output(['git', 'status', '-s'],
- cwd=llvm_symlink_dir,
- encoding='utf-8')
- if git_status:
- raise ValueError('Uncommited changes detected in %s' %
- os.path.dirname(os.path.dirname(llvm_symlink_dir)))
-
- start_sha = args.start_sha
- if start_sha == 'llvm':
- start_sha = parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_HASH')
- elif start_sha == 'llvm-next':
- start_sha = parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_NEXT_HASH')
- logging.info('Base llvm hash == %s', start_sha)
-
- llvm_config = git_llvm_rev.LLVMConfig(
- remote='origin', dir=get_llvm_hash.GetAndUpdateLLVMProjectInLLVMTools())
-
- start_sha = resolve_llvm_ref(llvm_config.dir, start_sha)
- start_rev = git_llvm_rev.translate_sha_to_rev(llvm_config, start_sha)
-
- if args.create_cl:
- branch = 'cherry-pick'
- git.CreateBranch(llvm_symlink_dir, branch)
- symlinks_to_uprev = []
- commit_messages = [
- 'llvm: cherry-pick CLs from upstream\n',
- ]
-
- for sha in args.sha:
- sha = resolve_llvm_ref(llvm_config.dir, sha)
- rev = git_llvm_rev.translate_sha_to_rev(llvm_config, sha)
- # Find out the llvm projects changed in this commit
- packages = get_package_names(sha, llvm_config.dir)
- # Find out the ebuild symlinks of the corresponding ChromeOS packages
- symlinks = chroot.GetChrootEbuildPaths(args.chroot_path, [
- 'sys-devel/llvm' if package == 'llvm' else 'sys-libs/' + package
- for package in packages
- ])
- symlinks = chroot.ConvertChrootPathsToAbsolutePaths(args.chroot_path,
- symlinks)
-
- # Create a patch and add its metadata for each package
- for package, symlink in zip(packages, symlinks):
- symlink_dir = os.path.dirname(symlink)
- patches_json_path = os.path.join(symlink_dir, 'files/PATCHES.json')
- relative_patches_dir = 'cherry' if package == 'llvm' else ''
- patches_dir = os.path.join(symlink_dir, 'files', relative_patches_dir)
- logging.info('Cherrypicking %s (%s) into %s', rev, sha, package)
-
- add_cherrypick(patches_json_path, patches_dir, relative_patches_dir,
- start_rev, llvm_config.dir, rev, sha, package)
- if args.create_cl:
- symlinks_to_uprev.extend(symlinks)
- commit_messages.extend([
- '\n\nreviews.llvm.org/rG%s\n' % sha,
- subprocess.check_output(['git', 'log', '-n1', '--oneline', sha],
- cwd=llvm_config.dir,
- encoding='utf-8')
- ])
-
- logging.info('Complete.')
-
- if args.create_cl:
- symlinks_to_uprev = list(sorted(set(symlinks_to_uprev)))
- for symlink in symlinks_to_uprev:
- update_chromeos_llvm_hash.UprevEbuildSymlink(symlink)
- subprocess.check_output(['git', 'add', '--all'], cwd=symlink_dir)
- git.UploadChanges(llvm_symlink_dir, branch, commit_messages)
- git.DeleteBranch(llvm_symlink_dir, branch)
-
-
-if __name__ == '__main__':
- sys.exit(main())
diff --git a/llvm_tools/fetch_cros_sdk_rolls.py b/llvm_tools/fetch_cros_sdk_rolls.py
index 42af678..83d7025 100755
--- a/llvm_tools/fetch_cros_sdk_rolls.py
+++ b/llvm_tools/fetch_cros_sdk_rolls.py
@@ -10,8 +10,6 @@
the toolchain ebuild ${x} go live?"
"""
-# pylint: disable=cros-logging-import
-
import argparse
import json
import logging
diff --git a/llvm_tools/get_llvm_hash.py b/llvm_tools/get_llvm_hash.py
index 329e829..83b5ae7 100755
--- a/llvm_tools/get_llvm_hash.py
+++ b/llvm_tools/get_llvm_hash.py
@@ -9,16 +9,18 @@
from __future__ import print_function
import argparse
+import contextlib
+import functools
import os
+import re
import shutil
import subprocess
import sys
import tempfile
-from contextlib import contextmanager
import git_llvm_rev
-from subprocess_helpers import CheckCommand
from subprocess_helpers import check_output
+from subprocess_helpers import CheckCommand
_LLVM_GIT_URL = ('https://chromium.googlesource.com/external/github.com/llvm'
'/llvm-project')
@@ -63,7 +65,66 @@
git_llvm_rev.Rev(branch=git_llvm_rev.MAIN_BRANCH, number=version))
-@contextmanager
+def CheckoutBranch(src_dir, branch):
+ """Checks out and pulls from a branch in a git repo.
+
+ Args:
+ src_dir: The LLVM source tree.
+ branch: The git branch to checkout in src_dir.
+
+ Raises:
+ ValueError: Failed to checkout or pull branch version
+ """
+ CheckCommand(['git', '-C', src_dir, 'checkout', branch])
+ CheckCommand(['git', '-C', src_dir, 'pull'])
+
+
+def ParseLLVMMajorVersion(cmakelist):
+ """Reads CMakeList.txt file contents for LLVMMajor Version.
+
+ Args:
+ cmakelist: contents of CMakeLists.txt
+
+ Returns:
+ The major version number as a string
+
+ Raises:
+ ValueError: The major version cannot be parsed from cmakelist
+ """
+ match = re.search(r'\n\s+set\(LLVM_VERSION_MAJOR (?P<major>\d+)\)', cmakelist)
+ if not match:
+ raise ValueError('Failed to parse CMakeList for llvm major version')
+ return match.group('major')
+
+
[email protected]_cache(maxsize=1)
+def GetLLVMMajorVersion(git_hash=None):
+ """Reads llvm/CMakeList.txt file contents for LLVMMajor Version.
+
+ Args:
+ git_hash: git hash of llvm version as string or None for top of trunk
+
+ Returns:
+ The major version number as a string
+
+ Raises:
+ ValueError: The major version cannot be parsed from cmakelist or
+ there was a failure to checkout git_hash version
+ FileExistsError: The src directory does not contain CMakeLists.txt
+ """
+ src_dir = GetAndUpdateLLVMProjectInLLVMTools()
+ cmakelists_path = os.path.join(src_dir, 'llvm', 'CMakeLists.txt')
+ if git_hash:
+ CheckCommand(['git', '-C', src_dir, 'checkout', git_hash])
+ try:
+ with open(cmakelists_path) as cmakelists_file:
+ return ParseLLVMMajorVersion(cmakelists_file.read())
+ finally:
+ if git_hash:
+ CheckoutBranch(src_dir, git_llvm_rev.MAIN_BRANCH)
+
+
[email protected]
def CreateTempLLVMRepo(temp_dir):
"""Adds a LLVM worktree to 'temp_dir'.
@@ -77,7 +138,7 @@
temp_dir: An absolute path to the temporary directory to put the worktree in
(obtained via 'tempfile.mkdtemp()').
- Returns:
+ Yields:
The absolute path to 'temp_dir'.
Raises:
@@ -88,7 +149,8 @@
abs_path_to_llvm_project_dir = GetAndUpdateLLVMProjectInLLVMTools()
CheckCommand([
'git', '-C', abs_path_to_llvm_project_dir, 'worktree', 'add', '--detach',
- temp_dir, git_llvm_rev.MAIN_BRANCH
+ temp_dir,
+ 'origin/%s' % git_llvm_rev.MAIN_BRANCH
])
try:
@@ -113,6 +175,9 @@
LLVM mirror. In either case, this function will return the absolute path to
'llvm-project-copy' directory.
+ Returns:
+ Absolute path to 'llvm-project-copy' directory in 'llvm_tools'
+
Raises:
ValueError: LLVM repo (in 'llvm-project-copy' dir.) has changes or failed to
checkout to main or failed to fetch from chromium mirror of LLVM.
@@ -125,8 +190,9 @@
if not os.path.isdir(abs_path_to_llvm_project_dir):
print(
- 'Checking out LLVM from scratch. This could take a while...\n'
- '(This should only need to be done once, though.)',
+ (f'Checking out LLVM to {abs_path_to_llvm_project_dir}\n'
+ 'so that we can map between commit hashes and revision numbers.\n'
+ 'This may take a while, but only has to be done once.'),
file=sys.stderr)
os.mkdir(abs_path_to_llvm_project_dir)
@@ -142,11 +208,7 @@
raise ValueError('LLVM repo in %s has changes, please remove.' %
abs_path_to_llvm_project_dir)
- CheckCommand([
- 'git', '-C', abs_path_to_llvm_project_dir, 'checkout',
- git_llvm_rev.MAIN_BRANCH
- ])
- CheckCommand(['git', '-C', abs_path_to_llvm_project_dir, 'pull'])
+ CheckoutBranch(abs_path_to_llvm_project_dir, git_llvm_rev.MAIN_BRANCH)
return abs_path_to_llvm_project_dir
@@ -154,6 +216,9 @@
def GetGoogle3LLVMVersion(stable):
"""Gets the latest google3 LLVM version.
+ Args:
+ stable: boolean, use the stable version or the unstable version
+
Returns:
The latest LLVM SVN version as an integer.
@@ -178,7 +243,7 @@
return GetVersionFrom(GetAndUpdateLLVMProjectInLLVMTools(), git_hash.rstrip())
-def is_svn_option(svn_option):
+def IsSvnOption(svn_option):
"""Validates whether the argument (string) is a git hash option.
The argument is used to find the git hash of LLVM.
@@ -186,6 +251,10 @@
Args:
svn_option: The option passed in as a command line argument.
+ Returns:
+ lowercase svn_option if it is a known hash source, otherwise the svn_option
+ as an int
+
Raises:
ValueError: Invalid svn option provided.
"""
@@ -212,7 +281,7 @@
Args:
svn_option: A valid svn option obtained from the command line.
- Ex: 'google3', 'tot', or <svn_version> such as 365123.
+ Ex. 'google3', 'tot', or <svn_version> such as 365123.
Returns:
A tuple that is the LLVM git hash and LLVM version.
@@ -240,7 +309,7 @@
"""Provides methods to retrieve a LLVM hash."""
@staticmethod
- @contextmanager
+ @contextlib.contextmanager
def CreateTempDirectory():
temp_dir = tempfile.mkdtemp()
@@ -310,7 +379,7 @@
parser = argparse.ArgumentParser(description='Finds the LLVM hash.')
parser.add_argument(
'--llvm_version',
- type=is_svn_option,
+ type=IsSvnOption,
required=True,
help='which git hash of LLVM to find. Either a svn revision, or one '
'of %s' % sorted(KNOWN_HASH_SOURCES))
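A minimal standalone sketch of the ParseLLVMMajorVersion regex added above, assuming an illustrative CMakeLists.txt fragment (not part of the patch):

import re

def parse_llvm_major_version(cmakelist: str) -> str:
  # Same pattern as ParseLLVMMajorVersion in get_llvm_hash.py.
  match = re.search(r'\n\s+set\(LLVM_VERSION_MAJOR (?P<major>\d+)\)', cmakelist)
  if not match:
    raise ValueError('Failed to parse CMakeList for llvm major version')
  return match.group('major')

sample = ('if(NOT DEFINED LLVM_VERSION_MAJOR)\n'
          '  set(LLVM_VERSION_MAJOR 15)\n'
          'endif()')
assert parse_llvm_major_version(sample) == '15'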
diff --git a/llvm_tools/get_llvm_hash_unittest.py b/llvm_tools/get_llvm_hash_unittest.py
index 2e56aed..49740f3 100755
--- a/llvm_tools/get_llvm_hash_unittest.py
+++ b/llvm_tools/get_llvm_hash_unittest.py
@@ -90,6 +90,50 @@
self.assertEqual(LLVMHash().GetTopOfTrunkGitHash(), 'a123testhash1')
mock_check_output.assert_called_once()
+ @mock.patch.object(subprocess, 'Popen')
+ def testCheckoutBranch(self, mock_popen):
+ mock_popen.return_value = mock.MagicMock(
+ communicate=lambda: (None, None), returncode=0)
+ get_llvm_hash.CheckoutBranch('fake/src_dir', 'fake_branch')
+ self.assertEqual(
+ mock_popen.call_args_list[0][0],
+ (['git', '-C', 'fake/src_dir', 'checkout', 'fake_branch'],))
+ self.assertEqual(mock_popen.call_args_list[1][0],
+ (['git', '-C', 'fake/src_dir', 'pull'],))
+
+ def testParseLLVMMajorVersion(self):
+ cmakelist_42 = ('set(CMAKE_BUILD_WITH_INSTALL_NAME_DIR ON)\n'
+ 'if(NOT DEFINED LLVM_VERSION_MAJOR)\n'
+ ' set(LLVM_VERSION_MAJOR 42)\n'
+ 'endif()')
+ self.assertEqual(get_llvm_hash.ParseLLVMMajorVersion(cmakelist_42), '42')
+
+ def testParseLLVMMajorVersionInvalid(self):
+ invalid_cmakelist = 'invalid cmakelist.txt contents'
+ with self.assertRaises(ValueError):
+ get_llvm_hash.ParseLLVMMajorVersion(invalid_cmakelist)
+
+ @mock.patch.object(get_llvm_hash, 'GetAndUpdateLLVMProjectInLLVMTools')
+ @mock.patch.object(get_llvm_hash, 'ParseLLVMMajorVersion')
+ @mock.patch.object(get_llvm_hash, 'CheckCommand')
+ @mock.patch.object(get_llvm_hash, 'CheckoutBranch')
+ @mock.patch(
+ 'get_llvm_hash.open',
+ mock.mock_open(read_data='mock contents'),
+ create=True)
+ def testGetLLVMMajorVersion(self, mock_checkout_branch, mock_git_checkout,
+ mock_major_version, mock_llvm_project_path):
+ mock_llvm_project_path.return_value = 'path/to/llvm-project'
+ mock_major_version.return_value = '1234'
+ self.assertEqual(get_llvm_hash.GetLLVMMajorVersion('314159265'), '1234')
+ # Second call should be memoized
+ self.assertEqual(get_llvm_hash.GetLLVMMajorVersion('314159265'), '1234')
+ mock_llvm_project_path.assert_called_once()
+ mock_major_version.assert_called_with('mock contents')
+ mock_git_checkout.assert_called_once_with(
+ ['git', '-C', 'path/to/llvm-project', 'checkout', '314159265'])
+ mock_checkout_branch.assert_called_once_with('path/to/llvm-project', 'main')
+
if __name__ == '__main__':
unittest.main()
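The memoization the new test relies on can be seen in isolation with a small, self-contained sketch (the names below are made up; only the functools.lru_cache behavior is the point):

import functools

calls = []

@functools.lru_cache(maxsize=1)
def get_major_version(git_hash=None):
  # Stand-in for GetLLVMMajorVersion; records how often the body runs.
  calls.append(git_hash)
  return '1234'

assert get_major_version('314159265') == '1234'
assert get_major_version('314159265') == '1234'  # Served from the cache.
assert len(calls) == 1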
diff --git a/llvm_tools/get_upstream_patch.py b/llvm_tools/get_upstream_patch.py
new file mode 100755
index 0000000..5669b02
--- /dev/null
+++ b/llvm_tools/get_upstream_patch.py
@@ -0,0 +1,465 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2020 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Get an upstream patch to LLVM's PATCHES.json."""
+
+import argparse
+import json
+import logging
+import os
+import shlex
+import subprocess
+import sys
+import typing as t
+from datetime import datetime
+
+import dataclasses
+
+import chroot
+import get_llvm_hash
+import git
+import git_llvm_rev
+import update_chromeos_llvm_hash
+
+__DOC_EPILOGUE = """
+Example Usage:
+ get_upstream_patch --chroot_path ~/chromiumos --platform chromiumos \
+--sha 1234567 --sha 890abdc
+"""
+
+
+class CherrypickError(ValueError):
+ """A ValueError that highlights the cherry-pick has been seen before"""
+
+
+def add_patch(patches_json_path: str, patches_dir: str,
+ relative_patches_dir: str, start_version: git_llvm_rev.Rev,
+ llvm_dir: str, rev: t.Union[git_llvm_rev.Rev, str], sha: str,
+ package: str, platforms: t.List[str]):
+ """Gets the start and end intervals in 'json_file'.
+
+ Args:
+ patches_json_path: The absolute path to PATCHES.json.
+ patches_dir: The absolute path to the directory patches are in.
+ relative_patches_dir: The relative path to PATCHES.json.
+ start_version: The base LLVM revision this patch applies to.
+ llvm_dir: The path to LLVM checkout.
+ rev: An LLVM revision (git_llvm_rev.Rev) for a cherrypick, or a
+ differential revision (str) otherwise.
+ sha: The LLVM git sha that corresponds to the patch. For differential
+ revisions, the git sha from the local commit created by 'arc patch'
+ is used.
+ package: The LLVM project name this patch applies to.
+ platforms: List of platforms this patch applies to.
+
+ Raises:
+ CherrypickError: A ValueError that highlights the cherry-pick has been
+ seen before.
+ """
+
+ with open(patches_json_path, encoding='utf-8') as f:
+ patches_json = json.load(f)
+
+ is_cherrypick = isinstance(rev, git_llvm_rev.Rev)
+ if is_cherrypick:
+ file_name = f'{sha}.patch'
+ else:
+ file_name = f'{rev}.patch'
+ rel_patch_path = os.path.join(relative_patches_dir, file_name)
+
+ for p in patches_json:
+ rel_path = p['rel_patch_path']
+ if rel_path == rel_patch_path:
+ raise CherrypickError(
+ f'Patch at {rel_path} already exists in PATCHES.json')
+ if is_cherrypick:
+ if sha in rel_path:
+ logging.warning(
+ 'Similarly-named patch already exists in PATCHES.json: %r',
+ rel_path)
+
+ with open(os.path.join(patches_dir, file_name), 'wb') as f:
+ cmd = ['git', 'show', sha]
+ # Only apply the part of the patch that belongs to this package, except for
+ # LLVM. This is because some packages are built with the LLVM ebuild on x86
+ # but not on the other architectures, e.g. compiler-rt. Therefore always
+ # apply the entire patch to the LLVM ebuild as a workaround.
+ if package != 'llvm':
+ cmd.append(package_to_project(package))
+ subprocess.check_call(cmd, stdout=f, cwd=llvm_dir)
+
+ commit_subject = subprocess.check_output(
+ ['git', 'log', '-n1', '--format=%s', sha],
+ cwd=llvm_dir,
+ encoding='utf-8')
+
+ end_vers = rev.number if isinstance(rev, git_llvm_rev.Rev) else None
+ patch_props = {
+ 'rel_patch_path': rel_patch_path,
+ 'metadata': {
+ 'title': commit_subject.strip(),
+ 'info': [],
+ },
+ 'platforms': sorted(platforms),
+ 'version_range': {
+ 'from': start_version.number,
+ 'until': end_vers,
+ },
+ }
+ patches_json.append(patch_props)
+
+ temp_file = patches_json_path + '.tmp'
+ with open(temp_file, 'w', encoding='utf-8') as f:
+ json.dump(patches_json,
+ f,
+ indent=4,
+ separators=(',', ': '),
+ sort_keys=True)
+ f.write('\n')
+ os.rename(temp_file, patches_json_path)
+
+
+def parse_ebuild_for_assignment(ebuild_path: str, var_name: str) -> str:
+ # '_pre' filters the LLVM 9.0 ebuild, which we never want to target, from
+ # this list.
+ candidates = [
+ x for x in os.listdir(ebuild_path)
+ if x.endswith('.ebuild') and '_pre' in x
+ ]
+
+ if not candidates:
+ raise ValueError('No ebuilds found under %r' % ebuild_path)
+
+ ebuild = os.path.join(ebuild_path, max(candidates))
+ with open(ebuild, encoding='utf-8') as f:
+ var_name_eq = var_name + '='
+ for orig_line in f:
+ if not orig_line.startswith(var_name_eq):
+ continue
+
+ # We shouldn't see much variety here, so do the simplest thing possible.
+ line = orig_line[len(var_name_eq):]
+ # Remove comments
+ line = line.split('#')[0]
+ # Remove quotes
+ line = shlex.split(line)
+ if len(line) != 1:
+ raise ValueError('Expected exactly one quoted value in %r' % orig_line)
+ return line[0].strip()
+
+ raise ValueError('No %s= line found in %r' % (var_name, ebuild))
+
+
+# Resolves a git ref (or similar) to a LLVM SHA.
+def resolve_llvm_ref(llvm_dir: str, sha: str) -> str:
+ return subprocess.check_output(
+ ['git', 'rev-parse', sha],
+ encoding='utf-8',
+ cwd=llvm_dir,
+ ).strip()
+
+
+# Get the package name of an LLVM project
+def project_to_package(project: str) -> str:
+ if project == 'libunwind':
+ return 'llvm-libunwind'
+ return project
+
+
+# Get the LLVM project name of a package
+def package_to_project(package: str) -> str:
+ if package == 'llvm-libunwind':
+ return 'libunwind'
+ return package
+
+
+# Get the LLVM projects changed in the specified sha
+def get_package_names(sha: str, llvm_dir: str) -> list:
+ paths = subprocess.check_output(
+ ['git', 'show', '--name-only', '--format=', sha],
+ cwd=llvm_dir,
+ encoding='utf-8').splitlines()
+ # Some LLVM projects are built by LLVM ebuild on X86, so always apply the
+ # patch to LLVM ebuild
+ packages = {'llvm'}
+ # Detect if there are more packages to apply the patch to
+ for path in paths:
+ package = project_to_package(path.split('/')[0])
+ if package in ('compiler-rt', 'libcxx', 'libcxxabi', 'llvm-libunwind'):
+ packages.add(package)
+ packages = list(sorted(packages))
+ return packages
+
+
+def create_patch_for_packages(packages: t.List[str], symlinks: t.List[str],
+ start_rev: git_llvm_rev.Rev,
+ rev: t.Union[git_llvm_rev.Rev, str], sha: str,
+ llvm_dir: str, platforms: t.List[str]):
+ """Create a patch and add its metadata for each package"""
+ for package, symlink in zip(packages, symlinks):
+ symlink_dir = os.path.dirname(symlink)
+ patches_json_path = os.path.join(symlink_dir, 'files/PATCHES.json')
+ relative_patches_dir = 'cherry' if package == 'llvm' else ''
+ patches_dir = os.path.join(symlink_dir, 'files', relative_patches_dir)
+ logging.info('Getting %s (%s) into %s', rev, sha, package)
+ add_patch(patches_json_path,
+ patches_dir,
+ relative_patches_dir,
+ start_rev,
+ llvm_dir,
+ rev,
+ sha,
+ package,
+ platforms=platforms)
+
+
+def make_cl(symlinks_to_uprev: t.List[str], llvm_symlink_dir: str, branch: str,
+ commit_messages: t.List[str], reviewers: t.Optional[t.List[str]],
+ cc: t.Optional[t.List[str]]):
+ symlinks_to_uprev = sorted(set(symlinks_to_uprev))
+ for symlink in symlinks_to_uprev:
+ update_chromeos_llvm_hash.UprevEbuildSymlink(symlink)
+ subprocess.check_output(['git', 'add', '--all'],
+ cwd=os.path.dirname(symlink))
+ git.UploadChanges(llvm_symlink_dir, branch, commit_messages, reviewers, cc)
+ git.DeleteBranch(llvm_symlink_dir, branch)
+
+
+def resolve_symbolic_sha(start_sha: str, llvm_symlink_dir: str) -> str:
+ if start_sha == 'llvm':
+ return parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_HASH')
+
+ if start_sha == 'llvm-next':
+ return parse_ebuild_for_assignment(llvm_symlink_dir, 'LLVM_NEXT_HASH')
+
+ return start_sha
+
+
+def find_patches_and_make_cl(
+ chroot_path: str, patches: t.List[str], start_rev: git_llvm_rev.Rev,
+ llvm_config: git_llvm_rev.LLVMConfig, llvm_symlink_dir: str,
+ create_cl: bool, skip_dependencies: bool,
+ reviewers: t.Optional[t.List[str]], cc: t.Optional[t.List[str]],
+ platforms: t.List[str]):
+
+ converted_patches = [
+ _convert_patch(llvm_config, skip_dependencies, p) for p in patches
+ ]
+ potential_duplicates = _get_duplicate_shas(converted_patches)
+ if potential_duplicates:
+ err_msg = '\n'.join(f'{a.patch} == {b.patch}'
+ for a, b in potential_duplicates)
+ raise RuntimeError(f'Found Duplicate SHAs:\n{err_msg}')
+
+ # CL Related variables, only used if `create_cl`
+ symlinks_to_uprev = []
+ commit_messages = [
+ 'llvm: get patches from upstream\n',
+ ]
+ branch = f'get-upstream-{datetime.now().strftime("%Y%m%d%H%M%S%f")}'
+
+ if create_cl:
+ git.CreateBranch(llvm_symlink_dir, branch)
+
+ for parsed_patch in converted_patches:
+ # Find out the llvm projects changed in this commit
+ packages = get_package_names(parsed_patch.sha, llvm_config.dir)
+ # Find out the ebuild symlinks of the corresponding ChromeOS packages
+ symlinks = chroot.GetChrootEbuildPaths(chroot_path, [
+ 'sys-devel/llvm' if package == 'llvm' else 'sys-libs/' + package
+ for package in packages
+ ])
+ symlinks = chroot.ConvertChrootPathsToAbsolutePaths(chroot_path, symlinks)
+ # Create a local patch for all the affected llvm projects
+ create_patch_for_packages(packages,
+ symlinks,
+ start_rev,
+ parsed_patch.rev,
+ parsed_patch.sha,
+ llvm_config.dir,
+ platforms=platforms)
+ if create_cl:
+ symlinks_to_uprev.extend(symlinks)
+
+ commit_messages.extend([
+ parsed_patch.git_msg(),
+ subprocess.check_output(
+ ['git', 'log', '-n1', '--oneline', parsed_patch.sha],
+ cwd=llvm_config.dir,
+ encoding='utf-8')
+ ])
+
+ if parsed_patch.is_differential:
+ subprocess.check_output(['git', 'reset', '--hard', 'HEAD^'],
+ cwd=llvm_config.dir)
+
+ if create_cl:
+ make_cl(symlinks_to_uprev, llvm_symlink_dir, branch, commit_messages,
+ reviewers, cc)
+
+
[email protected](frozen=True)
+class ParsedPatch:
+ """Class to keep track of bundled patch info."""
+ patch: str
+ sha: str
+ is_differential: bool
+ rev: t.Union[git_llvm_rev.Rev, str]
+
+ def git_msg(self) -> str:
+ if self.is_differential:
+ return f'\n\nreviews.llvm.org/{self.patch}\n'
+ return f'\n\nreviews.llvm.org/rG{self.sha}\n'
+
+
+def _convert_patch(llvm_config: git_llvm_rev.LLVMConfig,
+ skip_dependencies: bool, patch: str) -> ParsedPatch:
+ """Extract git revision info from a patch.
+
+ Args:
+ llvm_config: LLVM configuration object.
+ skip_dependencies: Pass the --skip-dependencies flag to `arc`.
+ patch: A single patch referent string.
+
+ Returns:
+ A [ParsedPatch] object.
+ """
+
+ # A git hash contains only lower-case hex characters, so a leading 'D'
+ # marks a Phabricator differential revision rather than a sha.
+ is_differential = patch.startswith('D')
+ if is_differential:
+ subprocess.check_output(
+ [
+ 'arc', 'patch', '--nobranch',
+ '--skip-dependencies' if skip_dependencies else '--revision', patch
+ ],
+ cwd=llvm_config.dir,
+ )
+ sha = resolve_llvm_ref(llvm_config.dir, 'HEAD')
+ rev = patch
+ else:
+ sha = resolve_llvm_ref(llvm_config.dir, patch)
+ rev = git_llvm_rev.translate_sha_to_rev(llvm_config, sha)
+ return ParsedPatch(patch=patch,
+ sha=sha,
+ rev=rev,
+ is_differential=is_differential)
+
+
+def _get_duplicate_shas(patches: t.List[ParsedPatch]
+ ) -> t.List[t.Tuple[ParsedPatch, ParsedPatch]]:
+ """Return a list of Patches which have duplicate SHA's"""
+ return [(left, right) for i, left in enumerate(patches)
+ for right in patches[i + 1:] if left.sha == right.sha]
+
+
+def get_from_upstream(chroot_path: str,
+ create_cl: bool,
+ start_sha: str,
+ patches: t.List[str],
+ platforms: t.List[str],
+ skip_dependencies: bool = False,
+ reviewers: t.List[str] = None,
+ cc: t.List[str] = None):
+ llvm_symlink = chroot.ConvertChrootPathsToAbsolutePaths(
+ chroot_path, chroot.GetChrootEbuildPaths(chroot_path,
+ ['sys-devel/llvm']))[0]
+ llvm_symlink_dir = os.path.dirname(llvm_symlink)
+
+ git_status = subprocess.check_output(['git', 'status', '-s'],
+ cwd=llvm_symlink_dir,
+ encoding='utf-8')
+
+ if git_status:
+ error_path = os.path.dirname(os.path.dirname(llvm_symlink_dir))
+ raise ValueError(f'Uncommitted changes detected in {error_path}')
+
+ start_sha = resolve_symbolic_sha(start_sha, llvm_symlink_dir)
+ logging.info('Base llvm hash == %s', start_sha)
+
+ llvm_config = git_llvm_rev.LLVMConfig(
+ remote='origin', dir=get_llvm_hash.GetAndUpdateLLVMProjectInLLVMTools())
+ start_sha = resolve_llvm_ref(llvm_config.dir, start_sha)
+
+ find_patches_and_make_cl(chroot_path=chroot_path,
+ patches=patches,
+ platforms=platforms,
+ start_rev=git_llvm_rev.translate_sha_to_rev(
+ llvm_config, start_sha),
+ llvm_config=llvm_config,
+ llvm_symlink_dir=llvm_symlink_dir,
+ create_cl=create_cl,
+ skip_dependencies=skip_dependencies,
+ reviewers=reviewers,
+ cc=cc)
+ logging.info('Complete.')
+
+
+def main():
+ chroot.VerifyOutsideChroot()
+ logging.basicConfig(
+ format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
+ level=logging.INFO,
+ )
+
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog=__DOC_EPILOGUE)
+ parser.add_argument('--chroot_path',
+ default=os.path.join(os.path.expanduser('~'),
+ 'chromiumos'),
+ help='the path to the chroot (default: %(default)s)')
+ parser.add_argument(
+ '--start_sha',
+ default='llvm-next',
+ help='LLVM SHA that the patch should start applying at. You can specify '
+ '"llvm" or "llvm-next", as well. Defaults to %(default)s.')
+ parser.add_argument('--sha',
+ action='append',
+ default=[],
+ help='The LLVM git SHA to cherry-pick.')
+ parser.add_argument(
+ '--differential',
+ action='append',
+ default=[],
+ help='The LLVM differential revision to apply. Example: D1234')
+ parser.add_argument(
+ '--platform',
+ action='append',
+ required=True,
+ help='Apply this patch to the given platform. Common options include '
+ '"chromiumos" and "android". Can be specified multiple times to '
+ 'apply to multiple platforms')
+ parser.add_argument('--create_cl',
+ action='store_true',
+ help='Automatically create a CL if specified')
+ parser.add_argument(
+ '--skip_dependencies',
+ action='store_true',
+ help="Skips a LLVM differential revision's dependencies. Only valid "
+ 'when --differential appears exactly once.')
+ args = parser.parse_args()
+
+ if not (args.sha or args.differential):
+ parser.error('--sha or --differential required')
+
+ if args.skip_dependencies and len(args.differential) != 1:
+ parser.error("--skip_dependencies is only valid when there's exactly one "
+ 'supplied differential')
+
+ get_from_upstream(
+ chroot_path=args.chroot_path,
+ create_cl=args.create_cl,
+ start_sha=args.start_sha,
+ patches=args.sha + args.differential,
+ skip_dependencies=args.skip_dependencies,
+ platforms=args.platform,
+ )
+
+
+if __name__ == '__main__':
+ sys.exit(main())
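A standalone sketch of the PATCHES.json entry shape that add_patch() appends, and its write-to-a-temp-file-then-rename step; the entry values and paths below are made up:

import json
import os
import tempfile

patch_props = {
    'rel_patch_path': 'cherry/deadbeef.patch',
    'metadata': {
        'title': 'Example upstream fix',
        'info': [],
    },
    'platforms': ['chromiumos'],
    'version_range': {
        'from': 433403,
        'until': 433500,
    },
}

with tempfile.TemporaryDirectory() as tmp:
  patches_json_path = os.path.join(tmp, 'PATCHES.json')
  with open(patches_json_path, 'w', encoding='utf-8') as f:
    json.dump([], f)  # Start from an empty patch list.

  with open(patches_json_path, encoding='utf-8') as f:
    patches_json = json.load(f)
  patches_json.append(patch_props)

  # Write to a sibling temp file and rename so a failed dump never leaves a
  # truncated PATCHES.json behind.
  temp_file = patches_json_path + '.tmp'
  with open(temp_file, 'w', encoding='utf-8') as f:
    json.dump(patches_json, f, indent=4, separators=(',', ': '), sort_keys=True)
    f.write('\n')
  os.rename(temp_file, patches_json_path)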
diff --git a/llvm_tools/git.py b/llvm_tools/git.py
index f38d5e7..22c7002 100755
--- a/llvm_tools/git.py
+++ b/llvm_tools/git.py
@@ -65,14 +65,14 @@
if not os.path.isdir(repo):
raise ValueError('Invalid directory path provided: %s' % repo)
- subprocess.check_output(['git', '-C', repo, 'checkout', 'cros/master'])
+ subprocess.check_output(['git', '-C', repo, 'checkout', 'cros/main'])
subprocess.check_output(['git', '-C', repo, 'reset', 'HEAD', '--hard'])
subprocess.check_output(['git', '-C', repo, 'branch', '-D', branch])
-def UploadChanges(repo, branch, commit_messages):
+def UploadChanges(repo, branch, commit_messages, reviewers=None, cc=None):
"""Uploads the changes in the specifed branch of the given repo for review.
Args:
@@ -80,6 +80,8 @@
branch: The name of the branch to upload.
commit_messages: A string of commit message(s) (i.e. '[message]'
of the changes made.
+ reviewers: A list of reviewers to add to the CL.
+ cc: A list of contributors to CC about the CL.
Returns:
A nametuple that has two (key, value) pairs, where the first pair is the
@@ -101,12 +103,24 @@
subprocess.check_output(['git', 'commit', '-F', f.name], cwd=repo)
# Upload the changes for review.
+ git_args = [
+ 'repo',
+ 'upload',
+ '--yes',
+ f'--reviewers={",".join(reviewers)}' if reviewers else '--ne',
+ '--no-verify',
+ f'--br={branch}',
+ ]
+
+ if cc:
+ git_args.append(f'--cc={",".join(cc)}')
+
out = subprocess.check_output(
- ['repo', 'upload', '--yes', '--ne', '--no-verify',
- '--br=%s' % branch],
+ git_args,
stderr=subprocess.STDOUT,
cwd=repo,
- encoding='utf-8')
+ encoding='utf-8',
+ )
print(out)
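A small sketch of how the new reviewers/cc parameters shape the `repo upload` argument list built in UploadChanges(); build_upload_args is a made-up helper and nothing is executed here:

def build_upload_args(branch, reviewers=None, cc=None):
  args = [
      'repo',
      'upload',
      '--yes',
      f'--reviewers={",".join(reviewers)}' if reviewers else '--ne',
      '--no-verify',
      f'--br={branch}',
  ]
  if cc:
    args.append(f'--cc={",".join(cc)}')
  return args

assert build_upload_args('my-branch') == [
    'repo', 'upload', '--yes', '--ne', '--no-verify', '--br=my-branch'
]
assert build_upload_args(
    'my-branch', reviewers=['[email protected]'], cc=['[email protected]']) == [
        'repo', 'upload', '--yes', '--reviewers=[email protected]',
        '--no-verify', '--br=my-branch', '--cc=[email protected]'
    ]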
diff --git a/llvm_tools/git_llvm_rev.py b/llvm_tools/git_llvm_rev.py
index c8c1505..b62b26e 100755
--- a/llvm_tools/git_llvm_rev.py
+++ b/llvm_tools/git_llvm_rev.py
@@ -183,6 +183,11 @@
raise ValueError(
f'No viable branches found from {llvm_config.remote} with {sha}')
+ # It seems that some `origin/release/.*` branches have
+ # `origin/upstream/release/.*` equivalents, which is... awkward to deal with.
+ # Prefer the latter, since that seems to have newer commits than the former.
+ # Technically n^2, but len(elements) should be like, tens in the worst case.
+ candidates = [x for x in candidates if f'upstream/{x}' not in candidates]
if len(candidates) != 1:
raise ValueError(
f'Ambiguity: multiple branches from {llvm_config.remote} have {sha}: '
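The branch-deduplication filter added above can be exercised on its own; the candidate branch names here are illustrative:

candidates = ['release/9.x', 'upstream/release/9.x', 'release/10.x']
# Drop any branch that also has an 'upstream/' twin; prefer the twin.
candidates = [x for x in candidates if f'upstream/{x}' not in candidates]
assert candidates == ['upstream/release/9.x', 'release/10.x']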
diff --git a/llvm_tools/git_llvm_rev_test.py b/llvm_tools/git_llvm_rev_test.py
index 74280c5..d05093a 100755
--- a/llvm_tools/git_llvm_rev_test.py
+++ b/llvm_tools/git_llvm_rev_test.py
@@ -93,7 +93,7 @@
def test_zz_branch_revs_work_after_merge_points_and_svn_cutoff(self) -> None:
# Arbitrary 9.x commit without an attached llvm-svn: value.
sha = self.rev_to_sha_with_round_trip(
- git_llvm_rev.Rev(branch='release/9.x', number=366670))
+ git_llvm_rev.Rev(branch='upstream/release/9.x', number=366670))
self.assertEqual(sha, '4e858e4ac00b59f064da4e1f7e276916e7d296aa')
def test_zz_branch_revs_work_at_merge_points(self) -> None:
@@ -108,7 +108,7 @@
# branch, we'll pick main for this. That's fine.
sha = git_llvm_rev.translate_rev_to_sha(
get_llvm_config(),
- git_llvm_rev.Rev(branch='release/9.x', number=rev_number))
+ git_llvm_rev.Rev(branch='upstream/release/9.x', number=rev_number))
self.assertEqual(sha, backing_sha)
def test_zz_branch_revs_work_after_merge_points(self) -> None:
@@ -117,7 +117,7 @@
# ours, and are therefore untrustworthy. The commit for this *does* have a
# different `llvm-svn:` string than we should have.
sha = self.rev_to_sha_with_round_trip(
- git_llvm_rev.Rev(branch='release/9.x', number=366427))
+ git_llvm_rev.Rev(branch='upstream/release/9.x', number=366427))
self.assertEqual(sha, '2cf681a11aea459b50d712abc7136f7129e4d57f')
diff --git a/llvm_tools/llvm_bisection.py b/llvm_tools/llvm_bisection.py
index b1898ea..0148efd 100755
--- a/llvm_tools/llvm_bisection.py
+++ b/llvm_tools/llvm_bisection.py
@@ -20,6 +20,7 @@
import get_llvm_hash
import git_llvm_rev
import modify_a_tryjob
+import update_chromeos_llvm_hash
import update_tryjob_status
@@ -51,18 +52,16 @@
'the first bad version (default: %(default)s)')
# Add argument for the good LLVM revision for bisection.
- parser.add_argument(
- '--start_rev',
- required=True,
- type=int,
- help='The good revision for the bisection.')
+ parser.add_argument('--start_rev',
+ required=True,
+ type=int,
+ help='The good revision for the bisection.')
# Add argument for the bad LLVM revision for bisection.
- parser.add_argument(
- '--end_rev',
- required=True,
- type=int,
- help='The bad revision for the bisection.')
+ parser.add_argument('--end_rev',
+ required=True,
+ type=int,
+ help='The bad revision for the bisection.')
# Add argument for the absolute path to the file that contains information on
# the previous tested svn version.
@@ -88,42 +87,38 @@
'of updating the packages')
# Add argument for custom options for the tryjob.
- parser.add_argument(
- '--options',
- required=False,
- nargs='+',
- help='options to use for the tryjob testing')
+ parser.add_argument('--options',
+ required=False,
+ nargs='+',
+ help='options to use for the tryjob testing')
# Add argument for the builder to use for the tryjob.
- parser.add_argument(
- '--builder', required=True, help='builder to use for the tryjob testing')
+ parser.add_argument('--builder',
+ required=True,
+ help='builder to use for the tryjob testing')
# Add argument for the description of the tryjob.
- parser.add_argument(
- '--description',
- required=False,
- nargs='+',
- help='the description of the tryjob')
+ parser.add_argument('--description',
+ required=False,
+ nargs='+',
+ help='the description of the tryjob')
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=cros_root,
+ help='the path to the chroot (default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--nocleanup',
- action='store_false',
- dest='cleanup',
- help='Abandon CLs created for bisectoin')
+ parser.add_argument('--nocleanup',
+ action='store_false',
+ dest='cleanup',
+ help='Abandon CLs created for bisection')
args_output = parser.parse_args()
@@ -174,8 +169,7 @@
all_bad_revisions = [end]
all_bad_revisions.extend(
- cur_tryjob['rev']
- for cur_tryjob in tryjobs
+ cur_tryjob['rev'] for cur_tryjob in tryjobs
if cur_tryjob['status'] == update_tryjob_status.TryjobStatus.BAD.value)
# The minimum value for the 'bad' field in the tryjobs is the new end
@@ -184,8 +178,7 @@
all_good_revisions = [start]
all_good_revisions.extend(
- cur_tryjob['rev']
- for cur_tryjob in tryjobs
+ cur_tryjob['rev'] for cur_tryjob in tryjobs
if cur_tryjob['status'] == update_tryjob_status.TryjobStatus.GOOD.value)
# The maximum value for the 'good' field in the tryjobs is the new start
@@ -205,8 +198,8 @@
pending_revisions = {
tryjob['rev']
for tryjob in tryjobs
- if tryjob['status'] == update_tryjob_status.TryjobStatus.PENDING.value and
- good_rev < tryjob['rev'] < bad_rev
+ if tryjob['status'] == update_tryjob_status.TryjobStatus.PENDING.value
+ and good_rev < tryjob['rev'] < bad_rev
}
# Find all revisions that are to be skipped within 'good_rev' and 'bad_rev'.
@@ -217,8 +210,8 @@
skip_revisions = {
tryjob['rev']
for tryjob in tryjobs
- if tryjob['status'] == update_tryjob_status.TryjobStatus.SKIP.value and
- good_rev < tryjob['rev'] < bad_rev
+ if tryjob['status'] == update_tryjob_status.TryjobStatus.SKIP.value
+ and good_rev < tryjob['rev'] < bad_rev
}
return good_rev, bad_rev, pending_revisions, skip_revisions
@@ -295,66 +288,62 @@
"""
chroot.VerifyOutsideChroot()
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
patch_metadata_file = 'PATCHES.json'
start = args_output.start_rev
end = args_output.end_rev
bisect_state = LoadStatusFile(args_output.last_tested, start, end)
if start != bisect_state['start'] or end != bisect_state['end']:
- raise ValueError(f'The start {start} or the end {end} version provided is '
- f'different than "start" {bisect_state["start"]} or "end" '
- f'{bisect_state["end"]} in the .JSON file')
+ raise ValueError(
+ f'The start {start} or the end {end} version provided is '
+ f'different than "start" {bisect_state["start"]} or "end" '
+ f'{bisect_state["end"]} in the .JSON file')
- # Pending and skipped revisions are between 'start_revision' and
- # 'end_revision'.
- start_revision, end_revision, pending_revisions, skip_revisions = \
- GetRemainingRange(start, end, bisect_state['jobs'])
+ # Pending and skipped revisions are between 'start_rev' and 'end_rev'.
+ start_rev, end_rev, pending_revs, skip_revs = GetRemainingRange(
+ start, end, bisect_state['jobs'])
- revisions, git_hashes = GetCommitsBetween(start_revision, end_revision,
+ revisions, git_hashes = GetCommitsBetween(start_rev, end_rev,
args_output.parallel,
- args_output.src_path,
- pending_revisions, skip_revisions)
+ args_output.src_path, pending_revs,
+ skip_revs)
- # No more revisions between 'start_revision' and 'end_revision', so
+ # No more revisions between 'start_rev' and 'end_rev', so
# bisection is complete.
#
- # This is determined by finding all valid revisions between 'start_revision'
- # and 'end_revision' and that are NOT in the 'pending' and 'skipped' set.
+ # This is determined by finding all valid revisions between 'start_rev'
+ # and 'end_rev' that are NOT in the 'pending' and 'skipped' sets.
if not revisions:
- if pending_revisions:
+ if pending_revs:
# Some tryjobs are not finished which may change the actual bad
# commit/revision when those tryjobs are finished.
- no_revisions_message = (f'No revisions between start {start_revision} '
- f'and end {end_revision} to create tryjobs\n')
+ no_revisions_message = (f'No revisions between start {start_rev} '
+ f'and end {end_rev} to create tryjobs\n')
- if pending_revisions:
- no_revisions_message += (
- 'The following tryjobs are pending:\n' +
- '\n'.join(str(rev) for rev in pending_revisions) + '\n')
+ if pending_revs:
+ no_revisions_message += ('The following tryjobs are pending:\n' +
+ '\n'.join(str(rev)
+ for rev in pending_revs) + '\n')
- if skip_revisions:
+ if skip_revs:
no_revisions_message += ('The following tryjobs were skipped:\n' +
- '\n'.join(str(rev) for rev in skip_revisions) +
- '\n')
+ '\n'.join(str(rev)
+ for rev in skip_revs) + '\n')
raise ValueError(no_revisions_message)
print(f'Finished bisecting for {args_output.last_tested}')
if args_output.src_path:
bad_llvm_hash = get_llvm_hash.GetGitHashFrom(args_output.src_path,
- end_revision)
+ end_rev)
else:
- bad_llvm_hash = get_llvm_hash.LLVMHash().GetLLVMHash(end_revision)
- print(f'The bad revision is {end_revision} and its commit hash is '
+ bad_llvm_hash = get_llvm_hash.LLVMHash().GetLLVMHash(end_rev)
+ print(f'The bad revision is {end_rev} and its commit hash is '
f'{bad_llvm_hash}')
- if skip_revisions:
- skip_revisions_message = ('\nThe following revisions were skipped:\n' +
- '\n'.join(str(rev) for rev in skip_revisions))
- print(skip_revisions_message)
+ if skip_revs:
+ skip_revs_message = ('\nThe following revisions were skipped:\n' +
+ '\n'.join(str(rev) for rev in skip_revs))
+ print(skip_revs_message)
if args_output.cleanup:
# Abandon all the CLs created for bisection
@@ -378,9 +367,9 @@
raise ValueError(f'Revision {rev} exists already in "jobs"')
Bisect(revisions, git_hashes, bisect_state, args_output.last_tested,
- update_packages, args_output.chroot_path, patch_metadata_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builder, args_output.verbose)
+ update_chromeos_llvm_hash.DEFAULT_PACKAGES, args_output.chroot_path,
+ patch_metadata_file, args_output.extra_change_lists,
+ args_output.options, args_output.builder, args_output.verbose)
if __name__ == '__main__':
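A self-contained sketch of the pending/skip set comprehensions reshaped above; the tryjob dictionaries and the 'pending'/'skip' status strings are placeholders for update_tryjob_status.TryjobStatus values:

tryjobs = [
    {'rev': 100, 'status': 'pending'},
    {'rev': 150, 'status': 'skip'},
    {'rev': 200, 'status': 'good'},
    {'rev': 300, 'status': 'pending'},  # Outside (good_rev, bad_rev).
]
good_rev, bad_rev = 90, 210

pending_revisions = {
    tryjob['rev']
    for tryjob in tryjobs
    if tryjob['status'] == 'pending' and good_rev < tryjob['rev'] < bad_rev
}
skip_revisions = {
    tryjob['rev']
    for tryjob in tryjobs
    if tryjob['status'] == 'skip' and good_rev < tryjob['rev'] < bad_rev
}
assert pending_revisions == {100}
assert skip_revisions == {150}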
diff --git a/llvm_tools/modify_a_tryjob.py b/llvm_tools/modify_a_tryjob.py
index 4d41e6b..519fb51 100755
--- a/llvm_tools/modify_a_tryjob.py
+++ b/llvm_tools/modify_a_tryjob.py
@@ -17,9 +17,9 @@
import chroot
import failure_modes
import get_llvm_hash
+import update_chromeos_llvm_hash
import update_packages_and_run_tests
import update_tryjob_status
-import update_chromeos_llvm_hash
class ModifyTryjob(enum.Enum):
@@ -57,11 +57,10 @@
# Add argument that determines which revision to search for in the list of
# tryjobs.
- parser.add_argument(
- '--revision',
- required=True,
- type=int,
- help='The revision to either remove or relaunch.')
+ parser.add_argument('--revision',
+ required=True,
+ type=int,
+ help='The revision to either remove or relaunch.')
# Add argument for other change lists that want to run alongside the tryjob.
parser.add_argument(
@@ -72,40 +71,38 @@
'of updating the packages')
# Add argument for custom options for the tryjob.
- parser.add_argument(
- '--options',
- required=False,
- nargs='+',
- help='options to use for the tryjob testing')
+ parser.add_argument('--options',
+ required=False,
+ nargs='+',
+ help='options to use for the tryjob testing')
# Add argument for the builder to use for the tryjob.
- parser.add_argument('--builder', help='builder to use for the tryjob testing')
+ parser.add_argument('--builder',
+ help='builder to use for the tryjob testing')
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=cros_root,
+ help='the path to the chroot (default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
args_output = parser.parse_args()
- if not os.path.isfile(args_output.status_file) or \
- not args_output.status_file.endswith('.json'):
+ if (not os.path.isfile(args_output.status_file)
+ or not args_output.status_file.endswith('.json')):
raise ValueError('File does not exist or does not ending in ".json" '
': %s' % args_output.status_file)
- if args_output.modify_tryjob == ModifyTryjob.ADD.value and \
- not args_output.builder:
+ if (args_output.modify_tryjob == ModifyTryjob.ADD.value
+ and not args_output.builder):
raise ValueError('A builder is required for adding a tryjob.')
- elif args_output.modify_tryjob != ModifyTryjob.ADD.value and \
- args_output.builder:
+ elif (args_output.modify_tryjob != ModifyTryjob.ADD.value
+ and args_output.builder):
raise ValueError('Specifying a builder is only available when adding a '
'tryjob.')
@@ -234,13 +231,13 @@
bisect_contents['jobs'][tryjob_index]['cl'],
bisect_contents['jobs'][tryjob_index]['extra_cls'],
bisect_contents['jobs'][tryjob_index]['options'],
- bisect_contents['jobs'][tryjob_index]['builder'], chroot_path, verbose)
+ bisect_contents['jobs'][tryjob_index]['builder'], chroot_path)
bisect_contents['jobs'][tryjob_index][
'status'] = update_tryjob_status.TryjobStatus.PENDING.value
bisect_contents['jobs'][tryjob_index]['link'] = tryjob_results[0]['link']
- bisect_contents['jobs'][tryjob_index]['buildbucket_id'] = tryjob_results[0][
- 'buildbucket_id']
+ bisect_contents['jobs'][tryjob_index]['buildbucket_id'] = tryjob_results[
+ 0]['buildbucket_id']
print('Successfully relaunched the tryjob for revision %d and updated '
'the tryjob link to %s' % (revision, tryjob_results[0]['link']))
@@ -253,17 +250,14 @@
# Make sure the revision is within the bounds of the start and end of the
# bisection.
elif bisect_contents['start'] < revision < bisect_contents['end']:
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
patch_metadata_file = 'PATCHES.json'
git_hash, revision = get_llvm_hash.GetLLVMHashAndVersionFromSVNOption(
revision)
- tryjob_dict = AddTryjob(update_packages, git_hash, revision, chroot_path,
+ tryjob_dict = AddTryjob(update_chromeos_llvm_hash.DEFAULT_PACKAGES,
+ git_hash, revision, chroot_path,
patch_metadata_file, extra_cls, options, builder,
verbose, revision)
@@ -277,7 +271,10 @@
modify_tryjob)
with open(status_file, 'w') as update_tryjobs:
- json.dump(bisect_contents, update_tryjobs, indent=4, separators=(',', ': '))
+ json.dump(bisect_contents,
+ update_tryjobs,
+ indent=4,
+ separators=(',', ': '))
def main():
@@ -290,9 +287,9 @@
PerformTryjobModification(args_output.revision,
ModifyTryjob(args_output.modify_tryjob),
args_output.status_file,
- args_output.extra_change_lists, args_output.options,
- args_output.builder, args_output.chroot_path,
- args_output.verbose)
+ args_output.extra_change_lists,
+ args_output.options, args_output.builder,
+ args_output.chroot_path, args_output.verbose)
if __name__ == '__main__':
diff --git a/llvm_tools/nightly_revert_checker.py b/llvm_tools/nightly_revert_checker.py
index 3a23890..8948508 100755
--- a/llvm_tools/nightly_revert_checker.py
+++ b/llvm_tools/nightly_revert_checker.py
@@ -6,12 +6,10 @@
"""Checks for new reverts in LLVM on a nightly basis.
-If any reverts are found that were previously unknown, this fires off an email.
-All LLVM SHAs to monitor are autodetected.
+If any reverts are found that were previously unknown, this cherry-picks them or
+fires off an email. All LLVM SHAs to monitor are autodetected.
"""
-# pylint: disable=cros-logging-import
-
from __future__ import print_function
import argparse
@@ -26,38 +24,50 @@
import cros_utils.email_sender as email_sender
import cros_utils.tiny_render as tiny_render
+
import get_llvm_hash
+import get_upstream_patch
import git_llvm_rev
import revert_checker
State = t.Any
-def _find_interesting_android_shas(
- android_llvm_toolchain_dir: str) -> t.List[t.Tuple[str]]:
+def _find_interesting_android_shas(android_llvm_toolchain_dir: str
+ ) -> t.List[t.Tuple[str, str]]:
llvm_project = os.path.join(android_llvm_toolchain_dir,
'toolchain/llvm-project')
def get_llvm_merge_base(branch: str) -> str:
- return subprocess.check_output(
- ['git', 'merge-base', branch, 'aosp/upstream-master'],
+ head_sha = subprocess.check_output(
+ ['git', 'rev-parse', branch],
cwd=llvm_project,
encoding='utf-8',
).strip()
+ merge_base = subprocess.check_output(
+ ['git', 'merge-base', branch, 'aosp/upstream-main'],
+ cwd=llvm_project,
+ encoding='utf-8',
+ ).strip()
+ logging.info('Merge-base for %s (HEAD == %s) and upstream-main is %s',
+ branch, head_sha, merge_base)
+ return merge_base
- main_legacy = get_llvm_merge_base('aosp/master-legacy')
+ main_legacy = get_llvm_merge_base('aosp/master-legacy') # nocheck
testing_upstream = get_llvm_merge_base('aosp/testing-upstream')
result = [('main-legacy', main_legacy)]
# If these are the same SHA, there's no point in tracking both.
if main_legacy != testing_upstream:
result.append(('testing-upstream', testing_upstream))
+ else:
+ logging.info('main-legacy and testing-upstream are identical; ignoring '
+ 'the latter.')
return result
-def _parse_llvm_ebuild_for_shas(
- ebuild_file: io.TextIOWrapper) -> t.List[t.Tuple[str]]:
-
+def _parse_llvm_ebuild_for_shas(ebuild_file: io.TextIOWrapper
+ ) -> t.List[t.Tuple[str, str]]:
def parse_ebuild_assignment(line: str) -> str:
no_comments = line.split('#')[0]
no_assign = no_comments.split('=', 1)[1].strip()
@@ -84,12 +94,12 @@
return results
-def _find_interesting_chromeos_shas(chromeos_base: str) -> t.List[t.Tuple[str]]:
+def _find_interesting_chromeos_shas(chromeos_base: str
+ ) -> t.List[t.Tuple[str, str]]:
llvm_dir = os.path.join(chromeos_base,
'src/third_party/chromiumos-overlay/sys-devel/llvm')
candidate_ebuilds = [
- os.path.join(llvm_dir, x)
- for x in os.listdir(llvm_dir)
+ os.path.join(llvm_dir, x) for x in os.listdir(llvm_dir)
if '_pre' in x and not os.path.islink(os.path.join(llvm_dir, x))
]
@@ -193,85 +203,14 @@
return {}
-def main(argv: t.List[str]) -> None:
- parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--state_file', required=True, help='File to store persistent state in.')
- parser.add_argument(
- '--llvm_dir', required=True, help='Up-to-date LLVM directory to use.')
- parser.add_argument(
- '--dry_run',
- action='store_true',
- help='Print email contents, rather than sending them.')
- parser.add_argument('--debug', action='store_true')
-
- subparsers = parser.add_subparsers(dest='repository')
- subparsers.required = True
-
- chromeos_subparser = subparsers.add_parser('chromeos')
- chromeos_subparser.add_argument(
- '--chromeos_dir', required=True, help='Up-to-date CrOS directory to use.')
-
- android_subparser = subparsers.add_parser('android')
- android_subparser.add_argument(
- '--android_llvm_toolchain_dir',
- required=True,
- help='Up-to-date android-llvm-toolchain directory to use.')
-
- opts = parser.parse_args(argv)
-
- logging.basicConfig(
- format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
- level=logging.DEBUG if opts.debug else logging.INFO,
- )
-
- dry_run = opts.dry_run
- llvm_dir = opts.llvm_dir
- repository = opts.repository
- state_file = opts.state_file
-
- if repository == 'chromeos':
- interesting_shas = _find_interesting_chromeos_shas(opts.chromeos_dir)
- recipients = _EmailRecipients(well_known=['mage'], direct=[])
- elif repository == 'android':
- interesting_shas = _find_interesting_android_shas(
- opts.android_llvm_toolchain_dir)
- recipients = _EmailRecipients(
- well_known=[], direct=['[email protected]'])
- else:
- raise ValueError('Unknown repository %s' % opts.repository)
-
- logging.info('Interesting SHAs were %r', interesting_shas)
-
- state = _read_state(state_file)
- logging.info('Loaded state\n%s', pprint.pformat(state))
-
- def prettify_sha(sha: str) -> tiny_render.Piece:
- rev = get_llvm_hash.GetVersionFrom(llvm_dir, sha)
-
- # 12 is arbitrary, but should be unambiguous enough.
- short_sha = sha[:12]
- return tiny_render.Switch(
- text='r%s (%s)' % (rev, short_sha),
- html=tiny_render.Link(
- href='https://reviews.llvm.org/rG' + sha, inner='r' + str(rev)),
- )
-
- def get_sha_description(sha: str) -> tiny_render.Piece:
- return subprocess.check_output(
- ['git', 'log', '-n1', '--format=%s', sha],
- cwd=llvm_dir,
- encoding='utf-8',
- ).strip()
-
- new_state: State = {}
- revert_emails_to_send: t.List[t.Tuple[str, t.List[revert_checker
- .Revert]]] = []
+def find_shas(llvm_dir: str, interesting_shas: t.List[t.Tuple[str, str]],
+ state: State, new_state: State):
for friendly_name, sha in interesting_shas:
logging.info('Finding reverts across %s (%s)', friendly_name, sha)
- all_reverts = revert_checker.find_reverts(
- llvm_dir, sha, root='origin/' + git_llvm_rev.MAIN_BRANCH)
+ all_reverts = revert_checker.find_reverts(llvm_dir,
+ sha,
+ root='origin/' +
+ git_llvm_rev.MAIN_BRANCH)
logging.info('Detected the following revert(s) across %s:\n%s',
friendly_name, pprint.pformat(all_reverts))
@@ -288,22 +227,179 @@
logging.info('...All of which have been reported.')
continue
- revert_emails_to_send.append(
- _generate_revert_email(repository, friendly_name, sha, prettify_sha,
- get_sha_description, new_reverts))
+ yield (friendly_name, sha, new_reverts)
- # We want to be as free of obvious side-effects as possible in case something
- # above breaks. Hence, send the email as late as possible.
- for email in revert_emails_to_send:
- if dry_run:
- logging.info('Would send email:\nSubject: %s\nBody:\n%s\n', email.subject,
- tiny_render.render_text_pieces(email.body))
+
+def do_cherrypick(chroot_path: str, llvm_dir: str,
+ interesting_shas: t.List[t.Tuple[str, str]], state: State,
+ reviewers: t.List[str], cc: t.List[str]) -> State:
+ new_state: State = {}
+ seen: t.Set[str] = set()
+ for friendly_name, _sha, reverts in find_shas(llvm_dir, interesting_shas,
+ state, new_state):
+ if friendly_name in seen:
+ continue
+ seen.add(friendly_name)
+ for sha, reverted_sha in reverts:
+ try:
+ # We upload reverts for all platforms by default, since there's no
+ # real reason for them to be CrOS-specific.
+ get_upstream_patch.get_from_upstream(chroot_path=chroot_path,
+ create_cl=True,
+ start_sha=reverted_sha,
+ patches=[sha],
+ reviewers=reviewers,
+ cc=cc,
+ platforms=())
+ except get_upstream_patch.CherrypickError as e:
+ logging.info('%s, skipping...', str(e))
+ return new_state
+
+
+def do_email(is_dry_run: bool, llvm_dir: str, repository: str,
+ interesting_shas: t.List[t.Tuple[str, str]], state: State,
+ recipients: _EmailRecipients) -> State:
+ def prettify_sha(sha: str) -> tiny_render.Piece:
+ rev = get_llvm_hash.GetVersionFrom(llvm_dir, sha)
+
+ # 12 is arbitrary, but should be unambiguous enough.
+ short_sha = sha[:12]
+ return tiny_render.Switch(
+ text=f'r{rev} ({short_sha})',
+ html=tiny_render.Link(href='https://reviews.llvm.org/rG' + sha,
+ inner='r' + str(rev)),
+ )
+
+ def get_sha_description(sha: str) -> tiny_render.Piece:
+ return subprocess.check_output(
+ ['git', 'log', '-n1', '--format=%s', sha],
+ cwd=llvm_dir,
+ encoding='utf-8',
+ ).strip()
+
+ new_state: State = {}
+ for friendly_name, sha, new_reverts in find_shas(llvm_dir, interesting_shas,
+ state, new_state):
+ email = _generate_revert_email(repository, friendly_name, sha,
+ prettify_sha, get_sha_description,
+ new_reverts)
+ if is_dry_run:
+ logging.info('Would send email:\nSubject: %s\nBody:\n%s\n',
+ email.subject, tiny_render.render_text_pieces(email.body))
else:
logging.info('Sending email with subject %r...', email.subject)
_send_revert_email(recipients, email)
logging.info('Email sent.')
+ return new_state
+
+
+def parse_args(argv: t.List[str]) -> t.Any:
+ parser = argparse.ArgumentParser(
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument(
+ 'action',
+ choices=['cherry-pick', 'email', 'dry-run'],
+ help='Automatically cherry-pick upstream reverts, send an email, or '
+ 'write to stdout.')
+ parser.add_argument('--state_file',
+ required=True,
+ help='File to store persistent state in.')
+ parser.add_argument('--llvm_dir',
+ required=True,
+ help='Up-to-date LLVM directory to use.')
+ parser.add_argument('--debug', action='store_true')
+ parser.add_argument(
+ '--reviewers',
+ type=str,
+ nargs='*',
+ help='Requests reviews from REVIEWERS. All REVIEWERS must have existing '
+ 'accounts.')
+ parser.add_argument(
+ '--cc',
+ type=str,
+ nargs='*',
+ help='CCs the CL to the recipients. All recipients must have existing '
+ 'accounts.')
+
+ subparsers = parser.add_subparsers(dest='repository')
+ subparsers.required = True
+
+ chromeos_subparser = subparsers.add_parser('chromeos')
+ chromeos_subparser.add_argument('--chromeos_dir',
+ required=True,
+ help='Up-to-date CrOS directory to use.')
+
+ android_subparser = subparsers.add_parser('android')
+ android_subparser.add_argument(
+ '--android_llvm_toolchain_dir',
+ required=True,
+ help='Up-to-date android-llvm-toolchain directory to use.')
+
+ return parser.parse_args(argv)
+
+
+def find_chroot(opts: t.Any, reviewers: t.List[str], cc: t.List[str]
+ ) -> t.Tuple[str, t.List[t.Tuple[str, str]], _EmailRecipients]:
+ recipients = reviewers + cc
+ if opts.repository == 'chromeos':
+ chroot_path = opts.chromeos_dir
+ return (chroot_path, _find_interesting_chromeos_shas(chroot_path),
+ _EmailRecipients(well_known=['mage'], direct=recipients))
+ elif opts.repository == 'android':
+ if opts.action == 'cherry-pick':
+ raise RuntimeError(
+ "android doesn't currently support automatic cherry-picking.")
+
+ chroot_path = opts.android_llvm_toolchain_dir
+ return (chroot_path, _find_interesting_android_shas(chroot_path),
+ _EmailRecipients(well_known=[],
+ direct=['[email protected]'] +
+ recipients))
+ else:
+ raise ValueError(f'Unknown repository {opts.repository}')
+
+
+def main(argv: t.List[str]) -> int:
+ opts = parse_args(argv)
+
+ logging.basicConfig(
+ format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
+ level=logging.DEBUG if opts.debug else logging.INFO,
+ )
+
+ action = opts.action
+ llvm_dir = opts.llvm_dir
+ repository = opts.repository
+ state_file = opts.state_file
+ reviewers = opts.reviewers if opts.reviewers else []
+ cc = opts.cc if opts.cc else []
+
+ chroot_path, interesting_shas, recipients = find_chroot(opts, reviewers, cc)
+ logging.info('Interesting SHAs were %r', interesting_shas)
+
+ state = _read_state(state_file)
+ logging.info('Loaded state\n%s', pprint.pformat(state))
+
+ # We want to be as free of obvious side-effects as possible in case something
+ # above breaks. Hence, take action as late as possible.
+ if action == 'cherry-pick':
+ new_state = do_cherrypick(chroot_path=chroot_path,
+ llvm_dir=llvm_dir,
+ interesting_shas=interesting_shas,
+ state=state,
+ reviewers=reviewers,
+ cc=cc)
+ else:
+ new_state = do_email(is_dry_run=action == 'dry-run',
+ llvm_dir=llvm_dir,
+ repository=repository,
+ interesting_shas=interesting_shas,
+ state=state,
+ recipients=recipients)
_write_state(state_file, new_state)
+ return 0
if __name__ == '__main__':
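A standalone sketch of the Android SHA deduplication logged above (the merge-base values are fake strings; no git commands are run):

def interesting_android_shas(main_legacy_base, testing_upstream_base):
  # Mirrors _find_interesting_android_shas: always track main-legacy, and only
  # track testing-upstream when its merge-base differs.
  result = [('main-legacy', main_legacy_base)]
  if main_legacy_base != testing_upstream_base:
    result.append(('testing-upstream', testing_upstream_base))
  return result

assert interesting_android_shas('abc123', 'abc123') == [('main-legacy', 'abc123')]
assert interesting_android_shas('abc123', 'def456') == [
    ('main-legacy', 'abc123'),
    ('testing-upstream', 'def456'),
]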
diff --git a/llvm_tools/nightly_revert_checker_test.py b/llvm_tools/nightly_revert_checker_test.py
index 68338a5..a8ab419 100755
--- a/llvm_tools/nightly_revert_checker_test.py
+++ b/llvm_tools/nightly_revert_checker_test.py
@@ -10,8 +10,10 @@
import io
import unittest
+from unittest.mock import patch
import cros_utils.tiny_render as tiny_render
+import get_upstream_patch
import nightly_revert_checker
import revert_checker
@@ -153,6 +155,43 @@
self.assertIn('Failed to detect SHAs', str(e.exception))
+ @patch('revert_checker.find_reverts')
+ @patch('get_upstream_patch.get_from_upstream')
+ def test_do_cherrypick_is_called(self, do_cherrypick, find_reverts):
+ find_reverts.return_value = [
+ revert_checker.Revert('12345abcdef', 'fedcba54321')
+ ]
+ nightly_revert_checker.do_cherrypick(
+ chroot_path='/path/to/chroot',
+ llvm_dir='/path/to/llvm',
+ interesting_shas=[('12345abcdef', 'fedcba54321')],
+ state={},
+ reviewers=['[email protected]'],
+ cc=['[email protected]'])
+
+ do_cherrypick.assert_called_once()
+ find_reverts.assert_called_once()
+
+ @patch('revert_checker.find_reverts')
+ @patch('get_upstream_patch.get_from_upstream')
+ def test_do_cherrypick_handles_cherrypick_error(self, do_cherrypick,
+ find_reverts):
+ find_reverts.return_value = [
+ revert_checker.Revert('12345abcdef', 'fedcba54321')
+ ]
+ do_cherrypick.side_effect = get_upstream_patch.CherrypickError(
+ 'Patch at 12345abcdef already exists in PATCHES.json')
+ nightly_revert_checker.do_cherrypick(
+ chroot_path='/path/to/chroot',
+ llvm_dir='/path/to/llvm',
+ interesting_shas=[('12345abcdef', 'fedcba54321')],
+ state={},
+ reviewers=['[email protected]'],
+ cc=['[email protected]'])
+
+ do_cherrypick.assert_called_once()
+ find_reverts.assert_called_once()
+
if __name__ == '__main__':
unittest.main()
diff --git a/llvm_tools/patch_manager.py b/llvm_tools/patch_manager.py
index 3c83fa9..f2d6b32 100755
--- a/llvm_tools/patch_manager.py
+++ b/llvm_tools/patch_manager.py
@@ -212,8 +212,13 @@
"""
# Get the metadata values of a patch if possible.
- start_version = patch_dict.get('start_version', 0)
- end_version = patch_dict.get('end_version', None)
+ # FIXME(b/221489531): Remove start_version & end_version
+ if 'version_range' in patch_dict:
+ start_version = patch_dict['version_range'].get('from', 0)
+ end_version = patch_dict['version_range'].get('until', None)
+ else:
+ start_version = patch_dict.get('start_version', 0)
+ end_version = patch_dict.get('end_version', None)
is_critical = patch_dict.get('is_critical', False)
return start_version, end_version, is_critical
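The backwards-compatible metadata lookup above can be summarized with a small sketch (get_patch_interval is a made-up name for the extraction step):

def get_patch_interval(patch_dict):
  # Prefer the new 'version_range' keys; fall back to the legacy
  # start_version/end_version fields until b/221489531 removes them.
  if 'version_range' in patch_dict:
    return (patch_dict['version_range'].get('from', 0),
            patch_dict['version_range'].get('until', None))
  return (patch_dict.get('start_version', 0),
          patch_dict.get('end_version', None))

assert get_patch_interval({'version_range': {'from': 10, 'until': 1000}}) == (10, 1000)
assert get_patch_interval({'start_version': 10}) == (10, None)
assert get_patch_interval({}) == (0, None)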
diff --git a/llvm_tools/patch_manager_unittest.py b/llvm_tools/patch_manager_unittest.py
index 62947ed..69bb683 100755
--- a/llvm_tools/patch_manager_unittest.py
+++ b/llvm_tools/patch_manager_unittest.py
@@ -182,7 +182,9 @@
test_patch = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_stdout.patch',
- 'end_version': 1000
+ 'version_range': {
+ 'until': 1000,
+ }
}
self.assertEqual(
@@ -275,7 +277,9 @@
patch = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ },
}]
abs_patch_path = '/abs/path/to/filesdir/PATCHES'
@@ -293,13 +297,17 @@
test_updated_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}]
expected_patch_metadata = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}
with CreateTemporaryJsonFile() as json_test_file:
@@ -335,7 +343,9 @@
test_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': rel_patch_path,
- 'start_version': 10
+ 'version_range': {
+ 'from': 10,
+ }
}]
with CreateTemporaryJsonFile() as json_test_file:
@@ -379,7 +389,9 @@
test_patch_metadata = [{
'comment': 'Redirects output to stdout',
'rel_patch_path': rel_patch_path,
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000,
+ },
}]
with CreateTemporaryJsonFile() as json_test_file:
@@ -415,28 +427,36 @@
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1250
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1250
+ }
}
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 900
+ 'version_range': {
+ 'from': 20,
+ 'until': 900
+ }
}
test_patch_metadata = [
@@ -520,28 +540,36 @@
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 2000
+ 'version_range': {
+ 'from': 20,
+ 'until': 2000
+ }
}
test_patch_metadata = [
@@ -654,8 +682,10 @@
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -665,7 +695,9 @@
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -674,8 +706,10 @@
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -684,8 +718,10 @@
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 20,
- 'end_version': 1400
+ 'version_range': {
+ 'from': 20,
+ 'until': 1400
+ }
}
test_patch_metadata = [
@@ -786,8 +822,10 @@
test_patch_1 = {
'comment': 'Redirects output to stdout',
'rel_patch_path': 'cherry/fixes_output.patch',
- 'start_version': 1000,
- 'end_version': 1190
+ 'version_range': {
+ 'from': 1000,
+ 'until': 1190
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -797,7 +835,9 @@
test_patch_2 = {
'comment': 'Fixes input',
'rel_patch_path': 'cherry/fixes_input.patch',
- 'start_version': 1000
+ 'version_range': {
+ 'from': 1000,
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -806,8 +846,10 @@
test_patch_3 = {
'comment': 'Adds a warning',
'rel_patch_path': 'add_warning.patch',
- 'start_version': 750,
- 'end_version': 1500
+ 'version_range': {
+ 'from': 750,
+ 'until': 1500
+ }
}
# For the 'remove_patches' mode, this patch is expected to be in the
@@ -816,8 +858,10 @@
test_patch_4 = {
'comment': 'Adds a helper function',
'rel_patch_path': 'add_helper.patch',
- 'start_version': 1600,
- 'end_version': 2000
+ 'version_range': {
+ 'from': 1600,
+ 'until': 2000
+ }
}
test_patch_metadata = [
diff --git a/llvm_tools/patch_sync/.gitignore b/llvm_tools/patch_sync/.gitignore
new file mode 100644
index 0000000..2f7896d
--- /dev/null
+++ b/llvm_tools/patch_sync/.gitignore
@@ -0,0 +1 @@
+target/
diff --git a/llvm_tools/patch_sync/Cargo.lock b/llvm_tools/patch_sync/Cargo.lock
new file mode 100644
index 0000000..1ad74a7
--- /dev/null
+++ b/llvm_tools/patch_sync/Cargo.lock
@@ -0,0 +1,460 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b26702f315f53b6071259e15dd9d64528213b44d61de1ec926eca7715d62203"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "clap"
+version = "2.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "bitflags",
+ "strsim",
+ "textwrap",
+ "unicode-width",
+ "vec_map",
+]
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "digest"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fcd999463524c52659517fe2cea98493cfe485d10565e7b0fb07dbba7ad2753"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "heck"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.112"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b03d17f364a3a042d5e5d46b053bbbf82c92c9430c592dd4c064dc6ee997125"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "opaque-debug"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
+
+[[package]]
+name = "patch_sync"
+version = "1.1.0"
+dependencies = [
+ "anyhow",
+ "rand",
+ "regex",
+ "scopeguard",
+ "serde",
+ "serde_json",
+ "sha2",
+ "structopt",
+ "time",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed0cfbc8191465bed66e1718596ee0b0b35d5ee1f41c5df2189d0fe8bde535ba"
+
+[[package]]
+name = "proc-macro-error"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
+dependencies = [
+ "proc-macro-error-attr",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro-error-attr"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "version_check",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f84e92c0f7c9d58328b85a78557813e4bd845130db68d7184635344399423b1"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+ "rand_hc",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "rand_hc"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7"
+dependencies = [
+ "rand_core",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "serde"
+version = "1.0.132"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b9875c23cf305cd1fd7eb77234cbb705f21ea6a72c637a5c6db5fe4b8e7f008"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.132"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ecc0db5cb2556c0e558887d9bbdcf6ac4471e83ff66cf696e5419024d1606276"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bcbd0344bc6533bc7ec56df11d42fb70f1b912351c0825ccb7211b59d8af7cf5"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha2"
+version = "0.9.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b69f9a4c9740d74c5baa3fd2e547f9525fa8088a8a958e0ca2409a514e33f5fa"
+dependencies = [
+ "block-buffer",
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+ "opaque-debug",
+]
+
+[[package]]
+name = "strsim"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+
+[[package]]
+name = "structopt"
+version = "0.3.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "40b9788f4202aa75c240ecc9c15c65185e6a39ccdeb0fd5d008b98825464c87c"
+dependencies = [
+ "clap",
+ "lazy_static",
+ "structopt-derive",
+]
+
+[[package]]
+name = "structopt-derive"
+version = "0.4.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0"
+dependencies = [
+ "heck",
+ "proc-macro-error",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "23a1dfb999630e338648c83e91c59a4e9fb7620f520c3194b6b89e276f2f1959"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "time"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41effe7cfa8af36f439fac33861b66b049edc6f9a32331e2312660529c1c24ad"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "typenum"
+version = "1.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b"
+
+[[package]]
+name = "unicode-width"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "vec_map"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
+
+[[package]]
+name = "version_check"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe"
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
diff --git a/llvm_tools/patch_sync/Cargo.toml b/llvm_tools/patch_sync/Cargo.toml
new file mode 100644
index 0000000..ed33d5c
--- /dev/null
+++ b/llvm_tools/patch_sync/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "patch_sync"
+version = "1.1.0"
+authors = ["Jordan R Abrahams-Whitehead <[email protected]>"]
+edition = "2018"
+
+[profile.release]
+panic = "abort"
+
+[dependencies]
+anyhow = "1.0"
+regex = "1.5"
+serde = {version = "1.0", features = ["derive"]}
+serde_json = "1.0"
+sha2 = "0.9"
+structopt = "0.3"
+time = "0.3"
+scopeguard = "1.1.0"
+
+[dev-dependencies]
+rand = "0.8"
diff --git a/llvm_tools/patch_sync/src/android_utils.rs b/llvm_tools/patch_sync/src/android_utils.rs
new file mode 100644
index 0000000..77cb4b8
--- /dev/null
+++ b/llvm_tools/patch_sync/src/android_utils.rs
@@ -0,0 +1,62 @@
+use std::path::Path;
+use std::process::Command;
+
+use anyhow::{bail, ensure, Result};
+
+const LLVM_ANDROID_REL_PATH: &str = "toolchain/llvm_android";
+
+/// Return the Android checkout's current llvm version.
+///
+/// This uses android_version.get_svn_revision_number, a python function
+/// that can't be executed directly. We spawn a Python3 program
+/// to run it and get the result from that.
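+///
+/// Roughly equivalent to running, from toolchain/llvm_android:
+///   python3 -c "import android_version; print(android_version.get_svn_revision_number(), end='')"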
+pub fn get_android_llvm_version(android_checkout: &Path) -> Result<String> {
+ let mut command = new_android_cmd(android_checkout, "python3")?;
+ command.args([
+ "-c",
+ "import android_version; print(android_version.get_svn_revision_number(), end='')",
+ ]);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "could not get android llvm version: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ let out_string = String::from_utf8(output.stdout)?.trim().to_string();
+ Ok(out_string)
+}
+
+/// Sort the Android patches using the cherrypick_cl.py Android utility.
+///
+/// This assumes that:
+/// 1. There exists a python script called cherrypick_cl.py
+/// 2. Calling it with the given arguments sorts the PATCHES.json file.
+/// 3. Calling it does nothing besides sorting the PATCHES.json file.
+///
+/// We aren't doing our own sorting because we shouldn't have to update patch_sync along
+/// with cherrypick_cl.py any time they change the __lt__ implementation.
+pub fn sort_android_patches(android_checkout: &Path) -> Result<()> {
+ let mut command = new_android_cmd(android_checkout, "python3")?;
+ command.args(["cherrypick_cl.py", "--reason", "patch_sync sorting"]);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "could not sort: {}",
+ String::from_utf8_lossy(&output.stderr)
+ );
+ }
+ Ok(())
+}
+
+fn new_android_cmd(android_checkout: &Path, cmd: &str) -> Result<Command> {
+ let mut command = Command::new(cmd);
+ let llvm_android_dir = android_checkout.join(LLVM_ANDROID_REL_PATH);
+ ensure!(
+ llvm_android_dir.is_dir(),
+ "can't make android command; {} is not a directory",
+ llvm_android_dir.display()
+ );
+ command.current_dir(llvm_android_dir);
+ Ok(command)
+}
diff --git a/llvm_tools/patch_sync/src/main.rs b/llvm_tools/patch_sync/src/main.rs
new file mode 100644
index 0000000..c244f1c
--- /dev/null
+++ b/llvm_tools/patch_sync/src/main.rs
@@ -0,0 +1,332 @@
+mod android_utils;
+mod patch_parsing;
+mod version_control;
+
+use std::borrow::ToOwned;
+use std::collections::BTreeSet;
+use std::path::{Path, PathBuf};
+
+use anyhow::{Context, Result};
+use structopt::StructOpt;
+
+use patch_parsing::{filter_patches_by_platform, PatchCollection, PatchDictSchema};
+use version_control::RepoSetupContext;
+
+fn main() -> Result<()> {
+ match Opt::from_args() {
+ Opt::Show {
+ cros_checkout_path,
+ android_checkout_path,
+ sync,
+ keep_unmerged,
+ } => show_subcmd(ShowOpt {
+ cros_checkout_path,
+ android_checkout_path,
+ sync,
+ keep_unmerged,
+ }),
+ Opt::Transpose {
+ cros_checkout_path,
+ cros_reviewers,
+ old_cros_ref,
+ android_checkout_path,
+ android_reviewers,
+ old_android_ref,
+ sync,
+ verbose,
+ dry_run,
+ no_commit,
+ wip,
+ disable_cq,
+ } => transpose_subcmd(TransposeOpt {
+ cros_checkout_path,
+ cros_reviewers: cros_reviewers
+ .map(|r| r.split(',').map(ToOwned::to_owned).collect())
+ .unwrap_or_default(),
+ old_cros_ref,
+ android_checkout_path,
+ android_reviewers: android_reviewers
+ .map(|r| r.split(',').map(ToOwned::to_owned).collect())
+ .unwrap_or_default(),
+ old_android_ref,
+ sync,
+ verbose,
+ dry_run,
+ no_commit,
+ wip,
+ disable_cq,
+ }),
+ }
+}
+
+struct ShowOpt {
+ cros_checkout_path: PathBuf,
+ android_checkout_path: PathBuf,
+ keep_unmerged: bool,
+ sync: bool,
+}
+
+fn show_subcmd(args: ShowOpt) -> Result<()> {
+ let ShowOpt {
+ cros_checkout_path,
+ android_checkout_path,
+ keep_unmerged,
+ sync,
+ } = args;
+ let ctx = RepoSetupContext {
+ cros_checkout: cros_checkout_path,
+ android_checkout: android_checkout_path,
+ sync_before: sync,
+ wip_mode: true, // Has no effect, as we're not making changes
+ enable_cq: false, // Has no effect, as we're not uploading anything
+ };
+ ctx.setup()?;
+ let make_collection = |platform: &str, patches_fp: &Path| -> Result<PatchCollection> {
+ let parsed_collection = PatchCollection::parse_from_file(patches_fp)
+ .with_context(|| format!("could not parse {} PATCHES.json", platform))?;
+ Ok(if keep_unmerged {
+ parsed_collection
+ } else {
+ filter_patches_by_platform(&parsed_collection, platform).map_patches(|p| {
+                // Build the platforms set by hand, as Rust 1.55 cannot use BTreeSet::from.
+ let mut platforms = BTreeSet::new();
+ platforms.insert(platform.to_string());
+ PatchDictSchema {
+ platforms,
+ ..p.clone()
+ }
+ })
+ })
+ };
+ let cur_cros_collection = make_collection("chromiumos", &ctx.cros_patches_path())?;
+ let cur_android_collection = make_collection("android", &ctx.android_patches_path())?;
+ let merged = cur_cros_collection.union(&cur_android_collection)?;
+ println!("{}", merged.serialize_patches()?);
+ Ok(())
+}
+
+struct TransposeOpt {
+ cros_checkout_path: PathBuf,
+ old_cros_ref: String,
+ android_checkout_path: PathBuf,
+ old_android_ref: String,
+ sync: bool,
+ verbose: bool,
+ dry_run: bool,
+ no_commit: bool,
+ cros_reviewers: Vec<String>,
+ android_reviewers: Vec<String>,
+ wip: bool,
+ disable_cq: bool,
+}
+
+fn transpose_subcmd(args: TransposeOpt) -> Result<()> {
+ let ctx = RepoSetupContext {
+ cros_checkout: args.cros_checkout_path,
+ android_checkout: args.android_checkout_path,
+ sync_before: args.sync,
+ wip_mode: args.wip,
+ enable_cq: !args.disable_cq,
+ };
+ ctx.setup()?;
+ let cros_patches_path = ctx.cros_patches_path();
+ let android_patches_path = ctx.android_patches_path();
+
+ // Get new Patches -------------------------------------------------------
+ let (cur_cros_collection, new_cros_patches) = patch_parsing::new_patches(
+ &cros_patches_path,
+ &ctx.old_cros_patch_contents(&args.old_cros_ref)?,
+ "chromiumos",
+ )
+ .context("finding new patches for chromiumos")?;
+ let (cur_android_collection, new_android_patches) = patch_parsing::new_patches(
+ &android_patches_path,
+ &ctx.old_android_patch_contents(&args.old_android_ref)?,
+ "android",
+ )
+ .context("finding new patches for android")?;
+
+ // Have to ignore patches that are already at the destination, even if
+ // the patches are new.
+ let new_cros_patches = new_cros_patches.subtract(&cur_android_collection)?;
+ let new_android_patches = new_android_patches.subtract(&cur_cros_collection)?;
+
+ // Need to do an extra filtering step for Android, as AOSP doesn't
+ // want patches outside of the start/end bounds.
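+    // The bounds below are half-open: a patch is kept when
+    // start <= android_llvm_version < end.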
+ let android_llvm_version: u64 = {
+ let android_llvm_version_str =
+ android_utils::get_android_llvm_version(&ctx.android_checkout)?;
+ android_llvm_version_str.parse::<u64>().with_context(|| {
+ format!(
+ "converting llvm version to u64: '{}'",
+ android_llvm_version_str
+ )
+ })?
+ };
+ let new_android_patches = new_android_patches.filter_patches(|p| {
+ match (p.get_start_version(), p.get_end_version()) {
+ (Some(start), Some(end)) => start <= android_llvm_version && android_llvm_version < end,
+ (Some(start), None) => start <= android_llvm_version,
+ (None, Some(end)) => android_llvm_version < end,
+ (None, None) => true,
+ }
+ });
+
+ if args.verbose {
+ display_patches("New patches from Chromium OS", &new_cros_patches);
+ display_patches("New patches from Android", &new_android_patches);
+ }
+
+ if args.dry_run {
+ println!("--dry-run specified; skipping modifications");
+ return Ok(());
+ }
+
+ modify_repos(
+ &ctx,
+ args.no_commit,
+ ModifyOpt {
+ new_cros_patches,
+ cur_cros_collection,
+ cros_reviewers: args.cros_reviewers,
+ new_android_patches,
+ cur_android_collection,
+ android_reviewers: args.android_reviewers,
+ },
+ )
+}
+
+struct ModifyOpt {
+ new_cros_patches: PatchCollection,
+ cur_cros_collection: PatchCollection,
+ cros_reviewers: Vec<String>,
+ new_android_patches: PatchCollection,
+ cur_android_collection: PatchCollection,
+ android_reviewers: Vec<String>,
+}
+
+fn modify_repos(ctx: &RepoSetupContext, no_commit: bool, opt: ModifyOpt) -> Result<()> {
+ // Cleanup on scope exit.
+ scopeguard::defer! {
+ ctx.cleanup();
+ }
+ // Transpose Patches -----------------------------------------------------
+ let mut cur_android_collection = opt.cur_android_collection;
+ let mut cur_cros_collection = opt.cur_cros_collection;
+ if !opt.new_cros_patches.is_empty() {
+ opt.new_cros_patches
+ .transpose_write(&mut cur_android_collection)?;
+ }
+ if !opt.new_android_patches.is_empty() {
+ opt.new_android_patches
+ .transpose_write(&mut cur_cros_collection)?;
+ }
+
+ if no_commit {
+ println!("--no-commit specified; not committing or uploading");
+ return Ok(());
+ }
+ // Commit and upload for review ------------------------------------------
+ // Note we want to check if the android patches are empty for CrOS, and
+ // vice versa. This is a little counterintuitive.
+ if !opt.new_android_patches.is_empty() {
+ ctx.cros_repo_upload(&opt.cros_reviewers)
+ .context("uploading chromiumos changes")?;
+ }
+ if !opt.new_cros_patches.is_empty() {
+ if let Err(e) = android_utils::sort_android_patches(&ctx.android_checkout) {
+ eprintln!(
+ "Couldn't sort Android patches; continuing. Caused by: {}",
+ e
+ );
+ }
+ ctx.android_repo_upload(&opt.android_reviewers)
+ .context("uploading android changes")?;
+ }
+ Ok(())
+}
+
+fn display_patches(prelude: &str, collection: &PatchCollection) {
+ println!("{}", prelude);
+ if collection.patches.is_empty() {
+ println!(" [No Patches]");
+ return;
+ }
+ println!("{}", collection);
+}
+
+#[derive(Debug, structopt::StructOpt)]
+#[structopt(name = "patch_sync", about = "A pipeline for syncing the patch code")]
+enum Opt {
+    /// Show a combined view of the two PATCHES.json files, without making any changes.
+ #[allow(dead_code)]
+ Show {
+ #[structopt(parse(from_os_str))]
+ cros_checkout_path: PathBuf,
+ #[structopt(parse(from_os_str))]
+ android_checkout_path: PathBuf,
+
+ /// Keep a patch's platform field even if it's not merged at that platform.
+ #[structopt(long)]
+ keep_unmerged: bool,
+
+        /// Run repo sync before showing.
+ #[structopt(short, long)]
+ sync: bool,
+ },
+ /// Transpose patches from two PATCHES.json files
+ /// to each other.
+ Transpose {
+ /// Path to the ChromiumOS source repo checkout.
+ #[structopt(long = "cros-checkout", parse(from_os_str))]
+ cros_checkout_path: PathBuf,
+
+ /// Emails to send review requests to during Chromium OS upload.
+ /// Comma separated.
+ #[structopt(long = "cros-rev")]
+ cros_reviewers: Option<String>,
+
+ /// Git ref (e.g. hash) for the ChromiumOS overlay to use as the base.
+ #[structopt(long = "overlay-base-ref")]
+ old_cros_ref: String,
+
+ /// Path to the Android Open Source Project source repo checkout.
+ #[structopt(long = "aosp-checkout", parse(from_os_str))]
+ android_checkout_path: PathBuf,
+
+ /// Emails to send review requests to during Android upload.
+ /// Comma separated.
+ #[structopt(long = "aosp-rev")]
+ android_reviewers: Option<String>,
+
+ /// Git ref (e.g. hash) for the llvm_android repo to use as the base.
+ #[structopt(long = "aosp-base-ref")]
+ old_android_ref: String,
+
+ /// Run repo sync before transposing.
+ #[structopt(short, long)]
+ sync: bool,
+
+ /// Print information to stdout
+ #[structopt(short, long)]
+ verbose: bool,
+
+        /// Do not change any files. Useful in combination with `--verbose`.
+ /// Implies `--no-commit`.
+ #[structopt(long)]
+ dry_run: bool,
+
+ /// Do not commit or upload any changes made.
+ #[structopt(long)]
+ no_commit: bool,
+
+ /// Upload and send things for review, but mark as WIP and send no
+ /// emails.
+ #[structopt(long)]
+ wip: bool,
+
+ /// Don't run CQ if set. Only has an effect if uploading.
+ #[structopt(long)]
+ disable_cq: bool,
+ },
+}
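+
+// Illustrative invocation (paths and refs below are placeholders):
+//   patch_sync transpose \
+//     --cros-checkout ~/chromiumos --overlay-base-ref <cros-ref> \
+//     --aosp-checkout ~/aosp --aosp-base-ref <aosp-ref> --verbose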
diff --git a/llvm_tools/patch_sync/src/patch_parsing.rs b/llvm_tools/patch_sync/src/patch_parsing.rs
new file mode 100644
index 0000000..124f0d6
--- /dev/null
+++ b/llvm_tools/patch_sync/src/patch_parsing.rs
@@ -0,0 +1,462 @@
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs::{copy, File};
+use std::io::{BufRead, BufReader, Read, Write};
+use std::path::{Path, PathBuf};
+
+use anyhow::{anyhow, Context, Result};
+use serde::{Deserialize, Serialize};
+use sha2::{Digest, Sha256};
+
+/// JSON serde struct.
+// FIXME(b/221489531): Remove when we clear out start_version and
+// end_version.
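+// An illustrative entry this deserializes (values are hypothetical):
+//   {"rel_patch_path": "cherry/fix.patch",
+//    "platforms": ["chromiumos"],
+//    "version_range": {"from": 1000, "until": 1250}}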
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct PatchDictSchema {
+ /// [deprecated(since = "1.1", note = "Use version_range")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub end_version: Option<u64>,
+ pub metadata: Option<BTreeMap<String, serde_json::Value>>,
+ #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
+ pub platforms: BTreeSet<String>,
+ pub rel_patch_path: String,
+ /// [deprecated(since = "1.1", note = "Use version_range")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub start_version: Option<u64>,
+ pub version_range: Option<VersionRange>,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+pub struct VersionRange {
+ pub from: Option<u64>,
+ pub until: Option<u64>,
+}
+
+// FIXME(b/221489531): Remove when we clear out start_version and
+// end_version.
+impl PatchDictSchema {
+ pub fn get_start_version(&self) -> Option<u64> {
+ self.version_range
+ .map(|x| x.from)
+ .unwrap_or(self.start_version)
+ }
+
+ pub fn get_end_version(&self) -> Option<u64> {
+ self.version_range
+ .map(|x| x.until)
+ .unwrap_or(self.end_version)
+ }
+}
+
+/// Struct to keep track of patches and their relative paths.
+#[derive(Debug, Clone)]
+pub struct PatchCollection {
+ pub patches: Vec<PatchDictSchema>,
+ pub workdir: PathBuf,
+}
+
+impl PatchCollection {
+    /// Create a `PatchCollection` from a PATCHES.json file.
+ pub fn parse_from_file(json_file: &Path) -> Result<Self> {
+ Ok(Self {
+ patches: serde_json::from_reader(File::open(json_file)?)?,
+ workdir: json_file
+ .parent()
+ .ok_or_else(|| anyhow!("failed to get json_file parent"))?
+ .to_path_buf(),
+ })
+ }
+
+ /// Create a `PatchCollection` from a string literal and a workdir.
+ pub fn parse_from_str(workdir: PathBuf, contents: &str) -> Result<Self> {
+ Ok(Self {
+ patches: serde_json::from_str(contents).context("parsing from str")?,
+ workdir,
+ })
+ }
+
+ /// Copy this collection with patches filtered by given criterion.
+ pub fn filter_patches(&self, f: impl FnMut(&PatchDictSchema) -> bool) -> Self {
+ Self {
+ patches: self.patches.iter().cloned().filter(f).collect(),
+ workdir: self.workdir.clone(),
+ }
+ }
+
+ /// Map over the patches.
+ pub fn map_patches(&self, f: impl FnMut(&PatchDictSchema) -> PatchDictSchema) -> Self {
+ Self {
+ patches: self.patches.iter().map(f).collect(),
+ workdir: self.workdir.clone(),
+ }
+ }
+
+ /// Return true if the collection is tracking any patches.
+ pub fn is_empty(&self) -> bool {
+ self.patches.is_empty()
+ }
+
+    /// Compute the set difference (patches in `self` but not in `subtrahend`),
+    /// returning a new `PatchCollection` which keeps the minuend's workdir.
+ pub fn subtract(&self, subtrahend: &Self) -> Result<Self> {
+ let mut new_patches = Vec::new();
+ // This is O(n^2) when it could be much faster, but n is always going to be less
+ // than 1k and speed is not important here.
+ for our_patch in &self.patches {
+ let found_in_sub = subtrahend.patches.iter().any(|sub_patch| {
+ let hash1 = subtrahend
+ .hash_from_rel_patch(sub_patch)
+ .expect("getting hash from subtrahend patch");
+ let hash2 = self
+ .hash_from_rel_patch(our_patch)
+ .expect("getting hash from our patch");
+ hash1 == hash2
+ });
+ if !found_in_sub {
+ new_patches.push(our_patch.clone());
+ }
+ }
+ Ok(Self {
+ patches: new_patches,
+ workdir: self.workdir.clone(),
+ })
+ }
+
+ pub fn union(&self, other: &Self) -> Result<Self> {
+ self.union_helper(
+ other,
+ |p| self.hash_from_rel_patch(p),
+ |p| other.hash_from_rel_patch(p),
+ )
+ }
+
+ fn union_helper(
+ &self,
+ other: &Self,
+ our_hash_f: impl Fn(&PatchDictSchema) -> Result<String>,
+ their_hash_f: impl Fn(&PatchDictSchema) -> Result<String>,
+ ) -> Result<Self> {
+ // 1. For all our patches:
+ // a. If there exists a matching patch hash from `other`:
+ // i. Create a new patch with merged platform info,
+ // ii. add the new patch to our new collection.
+ // iii. Mark the other patch as "merged"
+ // b. Otherwise, copy our patch to the new collection
+ // 2. For all unmerged patches from the `other`
+ // a. Copy their patch into the new collection
+ let mut combined_patches = Vec::new();
+ let mut other_merged = vec![false; other.patches.len()];
+
+ // 1.
+ for p in &self.patches {
+ let our_hash = our_hash_f(p)?;
+ let mut found = false;
+ // a.
+ for (idx, merged) in other_merged.iter_mut().enumerate() {
+ if !*merged {
+ let other_p = &other.patches[idx];
+ let their_hash = their_hash_f(other_p)?;
+ if our_hash == their_hash {
+ // i.
+ let new_platforms =
+ p.platforms.union(&other_p.platforms).cloned().collect();
+ // ii.
+ combined_patches.push(PatchDictSchema {
+ rel_patch_path: p.rel_patch_path.clone(),
+ start_version: p.start_version,
+ end_version: p.end_version,
+ platforms: new_platforms,
+ metadata: p.metadata.clone(),
+ version_range: p.version_range,
+ });
+ // iii.
+ *merged = true;
+ found = true;
+ break;
+ }
+ }
+ }
+ // b.
+ if !found {
+ combined_patches.push(p.clone());
+ }
+ }
+ // 2.
+ // Add any remaining, other-only patches.
+ for (idx, merged) in other_merged.iter().enumerate() {
+ if !*merged {
+ combined_patches.push(other.patches[idx].clone());
+ }
+ }
+
+ Ok(Self {
+ workdir: self.workdir.clone(),
+ patches: combined_patches,
+ })
+ }
+
+ /// Copy all patches from this collection into another existing collection, and write that
+ /// to the existing collection's file.
+ pub fn transpose_write(&self, existing_collection: &mut Self) -> Result<()> {
+ for p in &self.patches {
+ let original_file_path = self.workdir.join(&p.rel_patch_path);
+ let copy_file_path = existing_collection.workdir.join(&p.rel_patch_path);
+            copy_create_parents(&original_file_path, &copy_file_path)?;
+ existing_collection.patches.push(p.clone());
+ }
+ existing_collection.write_patches_json("PATCHES.json")
+ }
+
+ /// Write out the patch collection contents to a PATCHES.json file.
+ fn write_patches_json(&self, filename: &str) -> Result<()> {
+ let write_path = self.workdir.join(filename);
+ let mut new_patches_file = File::create(&write_path)
+ .with_context(|| format!("writing to {}", write_path.display()))?;
+ new_patches_file.write_all(self.serialize_patches()?.as_bytes())?;
+ Ok(())
+ }
+
+ pub fn serialize_patches(&self) -> Result<String> {
+ let mut serialization_buffer = Vec::<u8>::new();
+ // Four spaces to indent json serialization.
+ let mut serializer = serde_json::Serializer::with_formatter(
+ &mut serialization_buffer,
+ serde_json::ser::PrettyFormatter::with_indent(b" "),
+ );
+ self.patches
+ .serialize(&mut serializer)
+ .context("serializing patches to JSON")?;
+ // Append a newline at the end if not present. This is necessary to get
+ // past some pre-upload hooks.
+ if serialization_buffer.last() != Some(&b'\n') {
+ serialization_buffer.push(b'\n');
+ }
+ Ok(std::str::from_utf8(&serialization_buffer)?.to_string())
+ }
+
+ /// Return whether a given patch actually exists on the file system.
+ pub fn patch_exists(&self, patch: &PatchDictSchema) -> bool {
+ self.workdir.join(&patch.rel_patch_path).exists()
+ }
+
+ fn hash_from_rel_patch(&self, patch: &PatchDictSchema) -> Result<String> {
+ hash_from_patch_path(&self.workdir.join(&patch.rel_patch_path))
+ }
+}
+
+impl std::fmt::Display for PatchCollection {
+ fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+ for (i, p) in self.patches.iter().enumerate() {
+ let title = p
+ .metadata
+ .as_ref()
+ .and_then(|x| x.get("title"))
+ .and_then(serde_json::Value::as_str)
+ .unwrap_or("[No Title]");
+ let path = self.workdir.join(&p.rel_patch_path);
+ writeln!(f, "* {}", title)?;
+ if i == self.patches.len() - 1 {
+ write!(f, " {}", path.display())?;
+ } else {
+ writeln!(f, " {}", path.display())?;
+ }
+ }
+ Ok(())
+ }
+}
+
+/// Parse the current PATCHES.json and return both the current collection and a
+/// collection containing only the patches that are new relative to the old patch contents.
+pub fn new_patches(
+ patches_path: &Path,
+ old_patch_contents: &str,
+ platform: &str,
+) -> Result<(PatchCollection, PatchCollection)> {
+ let cur_collection = PatchCollection::parse_from_file(patches_path)
+ .with_context(|| format!("parsing {} PATCHES.json", platform))?;
+ let cur_collection = filter_patches_by_platform(&cur_collection, platform);
+ let cur_collection = cur_collection.filter_patches(|p| cur_collection.patch_exists(p));
+ let new_patches: PatchCollection = {
+ let old_collection = PatchCollection::parse_from_str(
+ patches_path.parent().unwrap().to_path_buf(),
+ old_patch_contents,
+ )?;
+ let old_collection = old_collection.filter_patches(|p| old_collection.patch_exists(p));
+ cur_collection.subtract(&old_collection)?
+ };
+ let new_patches = new_patches.map_patches(|p| {
+ let mut platforms = BTreeSet::new();
+ platforms.extend(["android".to_string(), "chromiumos".to_string()]);
+ PatchDictSchema {
+ platforms: platforms.union(&p.platforms).cloned().collect(),
+ ..p.to_owned()
+ }
+ });
+ Ok((cur_collection, new_patches))
+}
+
+/// Create a new collection with only the patches that apply to the
+/// given platform.
+///
+/// If there's no platform listed, the patch should still apply if the patch file exists.
+pub fn filter_patches_by_platform(collection: &PatchCollection, platform: &str) -> PatchCollection {
+ collection.filter_patches(|p| {
+ p.platforms.contains(platform) || (p.platforms.is_empty() && collection.patch_exists(p))
+ })
+}
+
+/// Get the hash from the patch file contents.
+///
+/// Not every patch file actually contains its own hash, so we must compute
+/// the hash ourselves when it's not found.
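+///
+/// For example, a patch whose first line is "commit <sha>" or "From <sha>" yields
+/// that sha; otherwise the whole file is hashed with SHA-256.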
+fn hash_from_patch(patch_contents: impl Read) -> Result<String> {
+ let mut reader = BufReader::new(patch_contents);
+ let mut buf = String::new();
+ reader.read_line(&mut buf)?;
+ let mut first_line_iter = buf.trim().split(' ').fuse();
+ let (fst_word, snd_word) = (first_line_iter.next(), first_line_iter.next());
+ if let (Some("commit" | "From"), Some(hash_str)) = (fst_word, snd_word) {
+ // If the first line starts with either "commit" or "From", the following
+ // text is almost certainly a commit hash.
+ Ok(hash_str.to_string())
+ } else {
+ // This is an annoying case where the patch isn't actually a commit.
+ // So we'll hash the entire file, and hope that's sufficient.
+ let mut hasher = Sha256::new();
+ hasher.update(&buf); // Have to hash the first line.
+ reader.read_to_string(&mut buf)?;
+ hasher.update(buf); // Hash the rest of the file.
+ let sha = hasher.finalize();
+ Ok(format!("{:x}", &sha))
+ }
+}
+
+fn hash_from_patch_path(patch: &Path) -> Result<String> {
+ let f = File::open(patch).with_context(|| format!("opening patch file {}", patch.display()))?;
+ hash_from_patch(f)
+}
+
+/// Copy a file from one path to another, and create any parent
+/// directories along the way.
+fn copy_create_parents(from: &Path, to: &Path) -> Result<()> {
+ let to_parent = to
+ .parent()
+ .with_context(|| format!("getting parent of {}", to.display()))?;
+ if !to_parent.exists() {
+ std::fs::create_dir_all(to_parent)?;
+ }
+
+ copy(&from, &to)
+ .with_context(|| format!("copying file from {} to {}", &from.display(), &to.display()))?;
+ Ok(())
+}
+
+#[cfg(test)]
+mod test {
+
+ use super::*;
+
+ /// Test we can extract the hash from patch files.
+ #[test]
+ fn test_hash_from_patch() {
+ // Example git patch from Gerrit
+ let desired_hash = "004be4037e1e9c6092323c5c9268acb3ecf9176c";
+ let test_file_contents = "commit 004be4037e1e9c6092323c5c9268acb3ecf9176c\n\
+ Author: An Author <some_email>\n\
+ Date: Thu Aug 6 12:34:16 2020 -0700";
+ assert_eq!(
+ &hash_from_patch(test_file_contents.as_bytes()).unwrap(),
+ desired_hash
+ );
+
+ // Example git patch from upstream
+ let desired_hash = "6f85225ef3791357f9b1aa097b575b0a2b0dff48";
+ let test_file_contents = "From 6f85225ef3791357f9b1aa097b575b0a2b0dff48\n\
+ Mon Sep 17 00:00:00 2001\n\
+ From: Another Author <another_email>\n\
+ Date: Wed, 18 Aug 2021 15:03:03 -0700";
+ assert_eq!(
+ &hash_from_patch(test_file_contents.as_bytes()).unwrap(),
+ desired_hash
+ );
+ }
+
+ #[test]
+ fn test_union() {
+ let patch1 = PatchDictSchema {
+ start_version: Some(0),
+ end_version: Some(1),
+ rel_patch_path: "a".into(),
+ metadata: None,
+ platforms: BTreeSet::from(["x".into()]),
+ version_range: Some(VersionRange {
+ from: Some(0),
+ until: Some(1),
+ }),
+ };
+ let patch2 = PatchDictSchema {
+ rel_patch_path: "b".into(),
+ platforms: BTreeSet::from(["x".into(), "y".into()]),
+ ..patch1.clone()
+ };
+ let patch3 = PatchDictSchema {
+ platforms: BTreeSet::from(["z".into(), "x".into()]),
+ ..patch1.clone()
+ };
+ let collection1 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1, patch2],
+ };
+ let collection2 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch3],
+ };
+ let union = collection1
+ .union_helper(
+ &collection2,
+ |p| Ok(p.rel_patch_path.to_string()),
+ |p| Ok(p.rel_patch_path.to_string()),
+ )
+ .expect("could not create union");
+ assert_eq!(union.patches.len(), 2);
+ assert_eq!(
+ union.patches[0].platforms.iter().collect::<Vec<&String>>(),
+ vec!["x", "z"]
+ );
+ assert_eq!(
+ union.patches[1].platforms.iter().collect::<Vec<&String>>(),
+ vec!["x", "y"]
+ );
+ }
+
+ #[test]
+ fn test_union_empties() {
+ let patch1 = PatchDictSchema {
+ start_version: Some(0),
+ end_version: Some(1),
+ rel_patch_path: "a".into(),
+ metadata: None,
+ platforms: Default::default(),
+ version_range: Some(VersionRange {
+ from: Some(0),
+ until: Some(1),
+ }),
+ };
+ let collection1 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1.clone()],
+ };
+ let collection2 = PatchCollection {
+ workdir: PathBuf::new(),
+ patches: vec![patch1],
+ };
+ let union = collection1
+ .union_helper(
+ &collection2,
+ |p| Ok(p.rel_patch_path.to_string()),
+ |p| Ok(p.rel_patch_path.to_string()),
+ )
+ .expect("could not create union");
+ assert_eq!(union.patches.len(), 1);
+ assert_eq!(union.patches[0].platforms.len(), 0);
+ }
+}
diff --git a/llvm_tools/patch_sync/src/version_control.rs b/llvm_tools/patch_sync/src/version_control.rs
new file mode 100644
index 0000000..e07d39d
--- /dev/null
+++ b/llvm_tools/patch_sync/src/version_control.rs
@@ -0,0 +1,400 @@
+use anyhow::{anyhow, bail, ensure, Context, Result};
+use regex::Regex;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Output};
+
+const CHROMIUMOS_OVERLAY_REL_PATH: &str = "src/third_party/chromiumos-overlay";
+const ANDROID_LLVM_REL_PATH: &str = "toolchain/llvm_android";
+
+const CROS_MAIN_BRANCH: &str = "main";
+const ANDROID_MAIN_BRANCH: &str = "master"; // nocheck
+const WORK_BRANCH_NAME: &str = "__patch_sync_tmp";
+
+/// Context struct to keep track of both Chromium OS and Android checkouts.
+#[derive(Debug)]
+pub struct RepoSetupContext {
+ pub cros_checkout: PathBuf,
+ pub android_checkout: PathBuf,
+ /// Run `repo sync` before doing any comparisons.
+ pub sync_before: bool,
+ pub wip_mode: bool,
+ pub enable_cq: bool,
+}
+
+impl RepoSetupContext {
+ pub fn setup(&self) -> Result<()> {
+ if self.sync_before {
+ {
+ let crpp = self.cros_patches_path();
+ let cros_git = crpp.parent().unwrap();
+ git_cd_cmd(cros_git, ["checkout", CROS_MAIN_BRANCH])?;
+ }
+ {
+ let anpp = self.android_patches_path();
+ let android_git = anpp.parent().unwrap();
+ git_cd_cmd(android_git, ["checkout", ANDROID_MAIN_BRANCH])?;
+ }
+ repo_cd_cmd(&self.cros_checkout, &["sync", CHROMIUMOS_OVERLAY_REL_PATH])?;
+ repo_cd_cmd(&self.android_checkout, &["sync", ANDROID_LLVM_REL_PATH])?;
+ }
+ Ok(())
+ }
+
+ pub fn cros_repo_upload<S: AsRef<str>>(&self, reviewers: &[S]) -> Result<()> {
+ let llvm_dir = self
+ .cros_checkout
+ .join(&CHROMIUMOS_OVERLAY_REL_PATH)
+ .join("sys-devel/llvm");
+ ensure!(
+ llvm_dir.is_dir(),
+ "CrOS LLVM dir {} is not a directory",
+ llvm_dir.display()
+ );
+ Self::rev_bump_llvm(&llvm_dir)?;
+ let mut extra_args = Vec::new();
+ for reviewer in reviewers {
+ extra_args.push("--re");
+ extra_args.push(reviewer.as_ref());
+ }
+ if self.wip_mode {
+ extra_args.push("--wip");
+ extra_args.push("--no-emails");
+ }
+ if self.enable_cq {
+ extra_args.push("--label=Commit-Queue+1");
+ }
+ Self::repo_upload(
+ &self.cros_checkout,
+ CHROMIUMOS_OVERLAY_REL_PATH,
+ &Self::build_commit_msg(
+ "llvm: Synchronize patches from android",
+ "android",
+ "chromiumos",
+ "BUG=None\nTEST=CQ",
+ ),
+ extra_args,
+ )
+ }
+
+ pub fn android_repo_upload<S: AsRef<str>>(&self, reviewers: &[S]) -> Result<()> {
+ let mut extra_args = Vec::new();
+ for reviewer in reviewers {
+ extra_args.push("--re");
+ extra_args.push(reviewer.as_ref());
+ }
+ if self.wip_mode {
+ extra_args.push("--wip");
+ extra_args.push("--no-emails");
+ }
+ if self.enable_cq {
+ extra_args.push("--label=Presubmit-Ready+1");
+ }
+ Self::repo_upload(
+ &self.android_checkout,
+ ANDROID_LLVM_REL_PATH,
+ &Self::build_commit_msg(
+ "Synchronize patches from chromiumos",
+ "chromiumos",
+ "android",
+ "Test: N/A",
+ ),
+ extra_args,
+ )
+ }
+
+ fn cros_cleanup(&self) -> Result<()> {
+ let git_path = self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH);
+ Self::cleanup_branch(&git_path, CROS_MAIN_BRANCH, WORK_BRANCH_NAME)
+ .with_context(|| format!("cleaning up branch {}", WORK_BRANCH_NAME))?;
+ Ok(())
+ }
+
+ fn android_cleanup(&self) -> Result<()> {
+ let git_path = self.android_checkout.join(ANDROID_LLVM_REL_PATH);
+ Self::cleanup_branch(&git_path, ANDROID_MAIN_BRANCH, WORK_BRANCH_NAME)
+ .with_context(|| format!("cleaning up branch {}", WORK_BRANCH_NAME))?;
+ Ok(())
+ }
+
+ /// Wrapper around cleanups to ensure both get run, even if errors appear.
+ pub fn cleanup(&self) {
+ if let Err(e) = self.cros_cleanup() {
+ eprintln!("Failed to clean up chromiumos, continuing: {}", e);
+ }
+ if let Err(e) = self.android_cleanup() {
+ eprintln!("Failed to clean up android, continuing: {}", e);
+ }
+ }
+
+ /// Get the Android path to the PATCHES.json file
+ pub fn android_patches_path(&self) -> PathBuf {
+ self.android_checkout
+ .join(&ANDROID_LLVM_REL_PATH)
+ .join("patches/PATCHES.json")
+ }
+
+ /// Get the Chromium OS path to the PATCHES.json file
+ pub fn cros_patches_path(&self) -> PathBuf {
+ self.cros_checkout
+ .join(&CHROMIUMOS_OVERLAY_REL_PATH)
+ .join("sys-devel/llvm/files/PATCHES.json")
+ }
+
+ /// Return the contents of the old PATCHES.json from Chromium OS
+ pub fn old_cros_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.cros_checkout.join(CHROMIUMOS_OVERLAY_REL_PATH),
+ Path::new("sys-devel/llvm/files/PATCHES.json"),
+ )
+ }
+
+ /// Return the contents of the old PATCHES.json from android
+ pub fn old_android_patch_contents(&self, hash: &str) -> Result<String> {
+ Self::old_file_contents(
+ hash,
+ &self.android_checkout.join(ANDROID_LLVM_REL_PATH),
+ Path::new("patches/PATCHES.json"),
+ )
+ }
+
+ fn repo_upload<'a, I: IntoIterator<Item = &'a str>>(
+ checkout_path: &Path,
+ subproject_git_wd: &'a str,
+ commit_msg: &str,
+ extra_flags: I,
+ ) -> Result<()> {
+ let git_path = &checkout_path.join(&subproject_git_wd);
+ ensure!(
+ git_path.is_dir(),
+ "git_path {} is not a directory",
+ git_path.display()
+ );
+ repo_cd_cmd(
+ checkout_path,
+ &["start", WORK_BRANCH_NAME, subproject_git_wd],
+ )?;
+ let base_args = ["upload", "--br", WORK_BRANCH_NAME, "-y", "--verify"];
+ let new_args = base_args
+ .iter()
+ .copied()
+ .chain(extra_flags)
+ .chain(["--", subproject_git_wd]);
+ git_cd_cmd(git_path, &["add", "."])
+ .and_then(|_| git_cd_cmd(git_path, &["commit", "-m", commit_msg]))
+ .and_then(|_| repo_cd_cmd(checkout_path, new_args))?;
+ Ok(())
+ }
+
+ /// Clean up the git repo after we're done with it.
+ fn cleanup_branch(git_path: &Path, base_branch: &str, rm_branch: &str) -> Result<()> {
+ git_cd_cmd(git_path, ["restore", "."])?;
+ git_cd_cmd(git_path, ["clean", "-fd"])?;
+ git_cd_cmd(git_path, ["checkout", base_branch])?;
+        // It's acceptable if we can't delete the branch; most commonly it simply
+        // does not exist yet, which is an expected result. Since that case is so
+        // common, we don't report failures from this command, as doing so would
+        // pollute the stderr logs.
+ let _ = git_cd_cmd(git_path, ["branch", "-D", rm_branch]);
+ Ok(())
+ }
+
+ /// Increment LLVM's revision number
+ fn rev_bump_llvm(llvm_dir: &Path) -> Result<PathBuf> {
+ let ebuild = find_ebuild(llvm_dir)
+ .with_context(|| format!("finding ebuild in {} to rev bump", llvm_dir.display()))?;
+ let ebuild_dir = ebuild.parent().unwrap();
+ let suffix_matcher = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
+ let ebuild_name = ebuild
+ .file_name()
+ .unwrap()
+ .to_str()
+ .ok_or_else(|| anyhow!("converting ebuild filename to utf-8"))?;
+ let new_path = if let Some(captures) = suffix_matcher.captures(ebuild_name) {
+ let full_suffix = captures.get(0).unwrap().as_str();
+ let cur_version = captures.get(1).unwrap().as_str().parse::<u32>().unwrap();
+ let new_filename =
+ ebuild_name.replace(full_suffix, &format!("-r{}.ebuild", cur_version + 1_u32));
+ let new_path = ebuild_dir.join(new_filename);
+ fs::rename(&ebuild, &new_path)?;
+ new_path
+ } else {
+ // File did not end in a revision. We should append -r1 to the end.
+ let new_filename = ebuild.file_stem().unwrap().to_string_lossy() + "-r1.ebuild";
+ let new_path = ebuild_dir.join(new_filename.as_ref());
+ fs::rename(&ebuild, &new_path)?;
+ new_path
+ };
+ Ok(new_path)
+ }
+
+ /// Return the contents of an old file in git
+ fn old_file_contents(hash: &str, pwd: &Path, file: &Path) -> Result<String> {
+ let git_ref = format!(
+ "{}:{}",
+ hash,
+ file.to_str()
+ .ok_or_else(|| anyhow!("failed to convert filepath to str"))?
+ );
+ let output = git_cd_cmd(pwd, &["show", &git_ref])?;
+ if !output.status.success() {
+ bail!("could not get old file contents for {}", &git_ref)
+ }
+ String::from_utf8(output.stdout)
+ .with_context(|| format!("converting {} file contents to UTF-8", &git_ref))
+ }
+
+ /// Create the commit message
+ fn build_commit_msg(subj: &str, from: &str, to: &str, footer: &str) -> String {
+ format!(
+ "[patch_sync] {}\n\n\
+Copies new PATCHES.json changes from {} to {}.\n
+For questions about this job, contact [email protected]\n\n
+{}",
+ subj, from, to, footer
+ )
+ }
+}
+
+/// Return the path of an ebuild located within the given directory.
+fn find_ebuild(dir: &Path) -> Result<PathBuf> {
+    // The logic here: iterate over all ebuild file paths with _pre in the name,
+    // sort them by revision number, and return the highest-revisioned one.
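+    // E.g. "llvm-13.0_pre433403_p20211019-r10.ebuild" outranks the same name with
+    // "-r2", and an ebuild without an "-rN" suffix counts as revision 0.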
+
+ let ebuild_rev_matcher = Regex::new(r"-r([0-9]+)\.ebuild").unwrap();
+ // For LLVM ebuilds, we only want to check for ebuilds that have this in their file name.
+ let per_heuristic = "_pre";
+    // Get an iterator over all ebuilds with a _pre in the file name.
+ let ebuild_candidates = fs::read_dir(dir)?.filter_map(|entry| {
+ let entry = entry.ok()?;
+ let path = entry.path();
+ if path.extension()? != "ebuild" {
+ // Not an ebuild, ignore.
+ return None;
+ }
+ let stem = path.file_stem()?.to_str()?;
+ if stem.contains(per_heuristic) {
+ return Some(path);
+ }
+ None
+ });
+ let try_parse_ebuild_rev = |path: PathBuf| -> Option<(u64, PathBuf)> {
+ let name = path.file_name()?;
+ if let Some(rev_match) = ebuild_rev_matcher.captures(name.to_str()?) {
+ let rev_str = rev_match.get(1)?;
+ let rev_num = rev_str.as_str().parse::<u64>().ok()?;
+ return Some((rev_num, path));
+ }
+ // If it doesn't have a revision, then it's revision 0.
+ Some((0, path))
+ };
+ let mut sorted_candidates: Vec<_> =
+ ebuild_candidates.filter_map(try_parse_ebuild_rev).collect();
+ sorted_candidates.sort_unstable_by_key(|x| x.0);
+ let highest_rev_ebuild = sorted_candidates
+ .pop()
+ .ok_or_else(|| anyhow!("could not find ebuild"))?;
+ Ok(highest_rev_ebuild.1)
+}
+
+/// Run a given git command from inside a specified git dir.
+pub fn git_cd_cmd<I, S>(pwd: &Path, args: I) -> Result<Output>
+where
+ I: IntoIterator<Item = S>,
+ S: AsRef<OsStr>,
+{
+ let mut command = Command::new("git");
+ command.current_dir(&pwd).args(args);
+ let output = command.output()?;
+ if !output.status.success() {
+ bail!(
+ "git command failed:\n {:?}\nstdout --\n{}\nstderr --\n{}",
+ command,
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr),
+ );
+ }
+ Ok(output)
+}
+
+pub fn repo_cd_cmd<I, S>(pwd: &Path, args: I) -> Result<()>
+where
+ I: IntoIterator<Item = S>,
+ S: AsRef<OsStr>,
+{
+ let mut command = Command::new("repo");
+ command.current_dir(&pwd).args(args);
+ let status = command.status()?;
+ if !status.success() {
+ bail!("repo command failed:\n {:?} \n", command)
+ }
+ Ok(())
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use rand::prelude::Rng;
+ use std::env;
+ use std::fs::File;
+
+ #[test]
+ fn test_revbump_ebuild() {
+ // Random number to append at the end of the test folder to prevent conflicts.
+ let rng: u32 = rand::thread_rng().gen();
+ let llvm_dir = env::temp_dir().join(format!("patch_sync_test_{}", rng));
+ fs::create_dir(&llvm_dir).expect("creating llvm dir in temp directory");
+
+ {
+ // With revision
+ let ebuild_name = "llvm-13.0_pre433403_p20211019-r10.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r11.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
+ }
+ {
+ // Without revision
+ let ebuild_name = "llvm-13.0_pre433403_p20211019.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r1.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild file");
+ }
+ {
+ // With both
+ let ebuild_name = "llvm-13.0_pre433403_p20211019.ebuild";
+ let ebuild_path = llvm_dir.join(ebuild_name);
+ File::create(&ebuild_path).expect("creating test ebuild file");
+ let ebuild_link_name = "llvm-13.0_pre433403_p20211019-r2.ebuild";
+ let ebuild_link_path = llvm_dir.join(ebuild_link_name);
+ File::create(&ebuild_link_path).expect("creating test ebuild link file");
+ let new_ebuild_path =
+ RepoSetupContext::rev_bump_llvm(&llvm_dir).expect("rev bumping the ebuild");
+ assert!(
+ new_ebuild_path.ends_with("llvm-13.0_pre433403_p20211019-r3.ebuild"),
+ "{}",
+ new_ebuild_path.display()
+ );
+ fs::remove_file(new_ebuild_path).expect("removing renamed ebuild link file");
+ fs::remove_file(ebuild_path).expect("removing renamed ebuild file");
+ }
+
+ fs::remove_dir(&llvm_dir).expect("removing temp test dir");
+ }
+}
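
The tests above pin down the revbump naming rule for LLVM ebuilds. Here is a minimal Python sketch of the same convention, useful when reasoning about the Python tooling below; the helper name and regex are illustrative assumptions, not part of this change:

```
import re

def bump_ebuild_rev(name):
  # An existing '-rN' suffix is incremented; a missing suffix counts as
  # revision 0, so the bumped name gets '-r1'.
  m = re.match(r'(.*)-r(\d+)\.ebuild$', name)
  if m:
    return '%s-r%d.ebuild' % (m.group(1), int(m.group(2)) + 1)
  return name[:-len('.ebuild')] + '-r1.ebuild'

assert (bump_ebuild_rev('llvm-13.0_pre433403_p20211019-r10.ebuild')
        == 'llvm-13.0_pre433403_p20211019-r11.ebuild')
assert (bump_ebuild_rev('llvm-13.0_pre433403_p20211019.ebuild')
        == 'llvm-13.0_pre433403_p20211019-r1.ebuild')
```
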
diff --git a/llvm_tools/revert_checker.py b/llvm_tools/revert_checker.py
index bb9182b..acc8b5f 100755
--- a/llvm_tools/revert_checker.py
+++ b/llvm_tools/revert_checker.py
@@ -1,8 +1,17 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
+#===----------------------------------------------------------------------===##
+#
+# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+# See https://llvm.org/LICENSE.txt for license information.
+# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+#
+#===----------------------------------------------------------------------===##
+#
+# !!!!!!!!!!!! NOTE !!!!!!!!!!!!
+# This is copied directly from upstream LLVM. Please make any changes upstream,
+# rather than to this file directly. Once changes are made there, you're free
+# to integrate them here.
"""Checks for reverts of commits across a given git commit.
@@ -20,19 +29,31 @@
uses a bundle of heuristics, and is bound to ignore / incorrectly flag some
reverts. The hope is that it'll easily catch the vast majority (>90%) of them,
though.
+
+This is designed to be used in one of two ways: an import in Python, or run
+directly from a shell. If you want to import this, the `find_reverts`
+function is the thing to look at. If you'd rather use this from a shell, have a
+usage example:
+
+```
+./revert_checker.py c47f97169 origin/main origin/release/12.x
+```
+
+This checks for all reverts from the tip of origin/main to c47f97169, which are
+across the latter. It then does the same for origin/release/12.x to c47f97169.
+Duplicate reverts discovered when walking both roots (origin/main and
+origin/release/12.x) are deduplicated in output.
"""
-# pylint: disable=cros-logging-import
-
-from __future__ import print_function
-
import argparse
import collections
import logging
import re
import subprocess
import sys
-import typing as t
+from typing import Generator, List, NamedTuple, Iterable
+
+assert sys.version_info >= (3, 6), 'Only Python 3.6+ is supported.'
# People are creative with their reverts, and heuristics are a bit difficult.
# Like 90% of reverts have "This reverts commit ${full_sha}".
@@ -43,7 +64,7 @@
# starts involving human intervention, which is probably not worth it for now.
-def _try_parse_reverts_from_commit_message(commit_message: str) -> t.List[str]:
+def _try_parse_reverts_from_commit_message(commit_message: str) -> List[str]:
if not commit_message:
return []
@@ -56,9 +77,10 @@
return results
-def _stream_stdout(command: t.List[str]) -> t.Generator[str, None, None]:
+def _stream_stdout(command: List[str]) -> Generator[str, None, None]:
with subprocess.Popen(
command, stdout=subprocess.PIPE, encoding='utf-8', errors='replace') as p:
+ assert p.stdout is not None # for mypy's happiness.
yield from p.stdout
@@ -73,14 +95,14 @@
).strip()
-_LogEntry = t.NamedTuple('_LogEntry', [
+_LogEntry = NamedTuple('_LogEntry', [
('sha', str),
- ('commit_message', t.List[str]),
+ ('commit_message', str),
])
def _log_stream(git_dir: str, root_sha: str,
- end_at_sha: str) -> t.Iterable[_LogEntry]:
+ end_at_sha: str) -> Iterable[_LogEntry]:
sep = 50 * '<>'
log_command = [
'git',
@@ -103,8 +125,6 @@
break
while found_commit_header:
- # crbug.com/1041148
- # pylint: disable=stop-iteration-return
sha = next(stdout_stream, None)
assert sha is not None, 'git died?'
sha = sha.rstrip()
@@ -122,52 +142,54 @@
yield _LogEntry(sha, '\n'.join(commit_message).rstrip())
-def _shas_between(git_dir: str, base_ref: str,
- head_ref: str) -> t.Iterable[str]:
+def _shas_between(git_dir: str, base_ref: str, head_ref: str) -> Iterable[str]:
rev_list = [
'git',
'-C',
git_dir,
'rev-list',
'--first-parent',
- '%s..%s' % (base_ref, head_ref),
+ f'{base_ref}..{head_ref}',
]
return (x.strip() for x in _stream_stdout(rev_list))
def _rev_parse(git_dir: str, ref: str) -> str:
- result = subprocess.check_output(
+ return subprocess.check_output(
['git', '-C', git_dir, 'rev-parse', ref],
encoding='utf-8',
).strip()
- return t.cast(str, result)
-Revert = t.NamedTuple('Revert', [
+Revert = NamedTuple('Revert', [
('sha', str),
('reverted_sha', str),
])
-def find_common_parent_commit(git_dir: str, ref_a: str, ref_b: str) -> str:
+def _find_common_parent_commit(git_dir: str, ref_a: str, ref_b: str) -> str:
+ """Finds the closest common parent commit between `ref_a` and `ref_b`."""
return subprocess.check_output(
['git', '-C', git_dir, 'merge-base', ref_a, ref_b],
encoding='utf-8',
).strip()
-def find_reverts(git_dir: str, across_ref: str, root: str) -> t.List[Revert]:
- """Finds reverts across `across_ref` in `git_dir`, starting from `root`."""
+def find_reverts(git_dir: str, across_ref: str, root: str) -> List[Revert]:
+ """Finds reverts across `across_ref` in `git_dir`, starting from `root`.
+
+ These reverts are returned in order of oldest reverts first.
+ """
across_sha = _rev_parse(git_dir, across_ref)
root_sha = _rev_parse(git_dir, root)
- common_ancestor = find_common_parent_commit(git_dir, across_sha, root_sha)
+ common_ancestor = _find_common_parent_commit(git_dir, across_sha, root_sha)
if common_ancestor != across_sha:
- raise ValueError("%s isn't an ancestor of %s (common ancestor: %s)" %
- (across_sha, root_sha, common_ancestor))
+ raise ValueError(f"{across_sha} isn't an ancestor of {root_sha} "
+                            f'(common ancestor: {common_ancestor})')
intermediate_commits = set(_shas_between(git_dir, across_sha, root_sha))
- assert across_ref not in intermediate_commits
+ assert across_sha not in intermediate_commits
logging.debug('%d commits appear between %s and %s',
len(intermediate_commits), across_sha, root_sha)
@@ -204,10 +226,14 @@
logging.error("%s claims to revert %s -- which isn't a commit -- %s", sha,
object_type, reverted_sha)
+ # Since `all_reverts` contains reverts in log order (e.g., newer comes before
+ # older), we need to reverse this to keep with our guarantee of older =
+ # earlier in the result.
+ all_reverts.reverse()
return all_reverts
-def main(args: t.List[str]) -> int:
+def _main() -> None:
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
@@ -217,7 +243,7 @@
parser.add_argument(
'root', nargs='+', help='Root(s) to search for commits from.')
parser.add_argument('--debug', action='store_true')
- opts = parser.parse_args(args)
+ opts = parser.parse_args()
logging.basicConfig(
format='%(asctime)s: %(levelname)s: %(filename)s:%(lineno)d: %(message)s',
@@ -228,14 +254,17 @@
# out. The overwhelmingly common case is also to have one root, and it's way
# easier to reason about output that comes in an order that's meaningful to
# git.
- all_reverts = collections.OrderedDict()
+ seen_reverts = set()
+ all_reverts = []
for root in opts.root:
for revert in find_reverts(opts.git_dir, opts.base_ref, root):
- all_reverts[revert] = None
+ if revert not in seen_reverts:
+ seen_reverts.add(revert)
+ all_reverts.append(revert)
- for revert in all_reverts.keys():
- print('%s claims to revert %s' % (revert.sha, revert.reverted_sha))
+ for revert in all_reverts:
+ print(f'{revert.sha} claims to revert {revert.reverted_sha}')
if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]))
+ _main()
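
Per the expanded module docstring, `find_reverts` is the intended entry point when importing this module. A short usage sketch follows; the checkout path is a placeholder and the refs are borrowed from the docstring example:

```
import revert_checker

# '/path/to/llvm-project' is a placeholder; refs come from the docstring above.
reverts = revert_checker.find_reverts(
    git_dir='/path/to/llvm-project',
    across_ref='c47f97169',
    root='origin/main')
for revert in reverts:  # oldest reverts come first
  print(f'{revert.sha} claims to revert {revert.reverted_sha}')
```
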
diff --git a/llvm_tools/revert_checker_test.py b/llvm_tools/revert_checker_test.py
deleted file mode 100755
index 16b3c3f..0000000
--- a/llvm_tools/revert_checker_test.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2020 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Tests for revert_checker."""
-
-from __future__ import print_function
-
-# pylint: disable=cros-logging-import
-import logging
-import unittest
-
-import llvm_project
-import revert_checker
-
-# pylint: disable=protected-access
-
-
-class _SilencingFilter(object):
- """Silences all log messages.
-
- Also collects info about log messages that would've been emitted.
- """
-
- def __init__(self):
- self.messages = []
-
- def filter(self, record):
- self.messages.append(record.getMessage())
- return 0
-
-
-class Test(unittest.TestCase):
- """Tests for revert_checker."""
-
- def silence_logging(self):
- root = logging.getLogger()
- filt = _SilencingFilter()
- root.addFilter(filt)
- self.addCleanup(root.removeFilter, filt)
- return filt
-
- def test_known_log_stream(self):
- start_sha = 'e241573d5972d34a323fa5c64774c4207340beb3'
- end_sha = 'a7a37517751ffb0f5529011b4ba96e67fcb27510'
- commits = [
- revert_checker._LogEntry(
- 'e241573d5972d34a323fa5c64774c4207340beb3', '\n'.join((
- '[mlir] NFC: remove IntegerValueSet / MutableIntegerSet',
- '',
- 'Summary:',
- '- these are unused and really not needed now given flat '
- 'affine',
- ' constraints',
- '',
- 'Differential Revision: https://reviews.llvm.org/D75792',
- ))),
- revert_checker._LogEntry(
- '97572fa6e9daecd648873496fd11f7d1e25a55f0',
- '[NFC] use hasAnyOperatorName and hasAnyOverloadedOperatorName '
- 'functions in clang-tidy matchers',
- ),
- ]
-
- logs = list(
- revert_checker._log_stream(
- llvm_project.get_location(),
- root_sha=start_sha,
- end_at_sha=end_sha,
- ))
- self.assertEqual(commits, logs)
-
- def test_reverted_noncommit_object_is_a_nop(self):
- log_filter = self.silence_logging()
- # c9944df916e41b1014dff5f6f75d52297b48ecdc mentions reverting a non-commit
- # object. It sits between the given base_ref and root.
- reverts = revert_checker.find_reverts(
- git_dir=llvm_project.get_location(),
- across_ref='c9944df916e41b1014dff5f6f75d52297b48ecdc~',
- root='c9944df916e41b1014dff5f6f75d52297b48ecdc')
- self.assertEqual(reverts, [])
-
- complaint = ('Failed to resolve reverted object '
- 'edd18355be574122aaa9abf58c15d8c50fb085a1')
- self.assertTrue(
- any(x.startswith(complaint) for x in log_filter.messages),
- log_filter.messages)
-
- def test_known_reverts_across_previous_llvm_next_rev(self):
- # c9944df916e41b1014dff5f6f75d52297b48ecdc mentions reverting a non-commit
- # object. It sits between the given base_ref and root.
- reverts = revert_checker.find_reverts(
- git_dir=llvm_project.get_location(),
- across_ref='c47f971694be0159ffddfee8a75ae515eba91439',
- root='9f981e9adf9c8d29bb80306daf08d2770263ade6')
- self.assertEqual(reverts, [
- revert_checker.Revert(
- sha='9f981e9adf9c8d29bb80306daf08d2770263ade6',
- reverted_sha='4060016fce3e6a0b926ee9fc59e440a612d3a2ec'),
- revert_checker.Revert(
- sha='4e0fe038f438ae1679eae9e156e1f248595b2373',
- reverted_sha='65b21282c710afe9c275778820c6e3c1cf46734b'),
- ])
-
-
-if __name__ == '__main__':
- llvm_project.ensure_up_to_date()
- unittest.main()
diff --git a/llvm_tools/update_all_tryjobs_with_auto.py b/llvm_tools/update_all_tryjobs_with_auto.py
deleted file mode 100755
index 11e67ed..0000000
--- a/llvm_tools/update_all_tryjobs_with_auto.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2019 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Updates the status of all tryjobs to the result of `cros buildresult`."""
-
-from __future__ import print_function
-
-import argparse
-import json
-import os
-
-import chroot
-import update_tryjob_status
-
-
-def GetPathToUpdateAllTryjobsWithAutoScript():
- """Returns the absolute path to this script."""
-
- return os.path.abspath(__file__)
-
-
-def GetCommandLineArgs():
- """Parses the command line for the command line arguments."""
-
- # Default absoute path to the chroot if not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
- # Create parser and add optional command-line arguments.
- parser = argparse.ArgumentParser(description=__doc__)
-
- # Add argument for the JSON file to use for the update of a tryjob.
- parser.add_argument(
- '--last_tested',
- required=True,
- help='The absolute path to the JSON file that contains the tryjobs used '
- 'for bisecting LLVM.')
-
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
- args_output = parser.parse_args()
-
- if not os.path.isfile(args_output.last_tested) or \
- not args_output.last_tested.endswith('.json'):
- raise ValueError('File does not exist or does not ending in ".json" '
- ': %s' % args_output.last_tested)
-
- return args_output
-
-
-def main():
- """Updates the status of a tryjob."""
-
- chroot.VerifyOutsideChroot()
-
- args_output = GetCommandLineArgs()
-
- with open(args_output.last_tested) as tryjobs:
- bisect_contents = json.load(tryjobs)
-
- for tryjob in bisect_contents['jobs']:
- if tryjob['status'] == update_tryjob_status.TryjobStatus.PENDING.value:
- tryjob['status'] = update_tryjob_status.GetAutoResult(
- args_output.chroot_path, tryjob['buildbucket_id'])
-
- new_file = '%s.new' % args_output.last_tested
- with open(new_file, 'w') as update_tryjobs:
- json.dump(bisect_contents, update_tryjobs, indent=4, separators=(',', ': '))
- os.rename(new_file, args_output.last_tested)
-
-
-if __name__ == '__main__':
- main()
diff --git a/llvm_tools/update_chromeos_llvm_hash.py b/llvm_tools/update_chromeos_llvm_hash.py
index e28fe69..4e9b910 100755
--- a/llvm_tools/update_chromeos_llvm_hash.py
+++ b/llvm_tools/update_chromeos_llvm_hash.py
@@ -4,32 +4,39 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
-"""Updates the LLVM hash and uprevs the build of the specified
-
-packages.
+"""Updates the LLVM hash and uprevs the build of the specified packages.
For each package, a temporary repo is created and the changes are uploaded
for review.
"""
from __future__ import print_function
-from datetime import datetime
-from enum import Enum
import argparse
+import datetime
+import enum
import os
import re
import subprocess
-from failure_modes import FailureModes
import chroot
+import failure_modes
import get_llvm_hash
import git
import llvm_patch_management
+DEFAULT_PACKAGES = [
+ 'dev-util/lldb-server',
+ 'sys-devel/llvm',
+ 'sys-libs/compiler-rt',
+ 'sys-libs/libcxx',
+ 'sys-libs/libcxxabi',
+ 'sys-libs/llvm-libunwind',
+]
+
# Specify which LLVM hash to update
-class LLVMVariant(Enum):
+class LLVMVariant(enum.Enum):
"""Represent the LLVM hash in an ebuild file to update."""
current = 'LLVM_HASH'
@@ -41,6 +48,21 @@
verbose = False
+def defaultCrosRoot():
+ """Get default location of chroot_path.
+
+ The logic assumes that the cros_root is ~/chromiumos, unless llvm_tools is
+ inside of a CrOS checkout, in which case that checkout should be used.
+
+ Returns:
+ The best guess location for the cros checkout.
+ """
+ llvm_tools_path = os.path.realpath(os.path.dirname(__file__))
+ if llvm_tools_path.endswith('src/third_party/toolchain-utils/llvm_tools'):
+ return os.path.join(llvm_tools_path, '../../../../')
+ return '~/chromiumos'
+
+
def GetCommandLineArgs():
"""Parses the command line for the optional command line arguments.
@@ -51,35 +73,28 @@
and the LLVM version to use when retrieving the LLVM hash.
"""
- # Default path to the chroot if a path is not specified.
- cros_root = os.path.expanduser('~')
- cros_root = os.path.join(cros_root, 'chromiumos')
-
# Create parser and add optional command-line arguments.
parser = argparse.ArgumentParser(
description="Updates the build's hash for llvm-next.")
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=defaultCrosRoot(),
+ help='the path to the chroot (default: %(default)s)')
# Add argument for specific builds to uprev and update their llvm-next hash.
- parser.add_argument(
- '--update_packages',
- default=['sys-devel/llvm'],
- required=False,
- nargs='+',
- help='the ebuilds to update their hash for llvm-next ' \
- '(default: %(default)s)')
+ parser.add_argument('--update_packages',
+ default=DEFAULT_PACKAGES,
+ required=False,
+ nargs='+',
+ help='the ebuilds to update their hash for llvm-next '
+ '(default: %(default)s)')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
# Add argument for the LLVM hash to update
parser.add_argument(
@@ -91,7 +106,7 @@
# Add argument for the LLVM version to use.
parser.add_argument(
'--llvm_version',
- type=get_llvm_hash.is_svn_option,
+ type=get_llvm_hash.IsSvnOption,
required=True,
help='which git hash to use. Either a svn revision, or one '
'of %s' % sorted(get_llvm_hash.KNOWN_HASH_SOURCES))
@@ -99,12 +114,15 @@
# Add argument for the mode of the patch management when handling patches.
parser.add_argument(
'--failure_mode',
- default=FailureModes.FAIL.value,
- choices=[FailureModes.FAIL.value, FailureModes.CONTINUE.value,
- FailureModes.DISABLE_PATCHES.value,
- FailureModes.REMOVE_PATCHES.value],
- help='the mode of the patch manager when handling failed patches ' \
- '(default: %(default)s)')
+ default=failure_modes.FailureModes.FAIL.value,
+ choices=[
+ failure_modes.FailureModes.FAIL.value,
+ failure_modes.FailureModes.CONTINUE.value,
+ failure_modes.FailureModes.DISABLE_PATCHES.value,
+ failure_modes.FailureModes.REMOVE_PATCHES.value
+ ],
+ help='the mode of the patch manager when handling failed patches '
+ '(default: %(default)s)')
# Add argument for the patch metadata file.
parser.add_argument(
@@ -199,7 +217,6 @@
for cur_line in ReplaceLLVMHash(ebuild_file, llvm_variant, git_hash,
svn_version):
temp_file.write(cur_line)
-
os.rename(temp_ebuild_file, ebuild_path)
# Get the path to the parent directory.
@@ -217,6 +234,9 @@
llvm_variant: The LLVM hash to update.
git_hash: The new git hash.
svn_version: The SVN-style revision number of git_hash.
+
+ Yields:
+ lines of the modified ebuild file
"""
is_updated = False
llvm_regex = re.compile('^' + re.escape(llvm_variant.value) +
@@ -266,7 +286,7 @@
os.path.dirname(symlink), 'mv', symlink, new_symlink])
-def UprevEbuildToVersion(symlink, svn_version):
+def UprevEbuildToVersion(symlink, svn_version, git_hash):
"""Uprevs the ebuild's revision number.
Increases the revision number by 1 and stages the change in
@@ -275,31 +295,36 @@
Args:
symlink: The absolute path of an ebuild symlink.
svn_version: The SVN-style revision number of git_hash.
+ git_hash: The new git hash.
Raises:
ValueError: Failed to uprev the ebuild or failed to stage the changes.
+ AssertionError: No llvm version provided for an LLVM uprev
"""
if not os.path.islink(symlink):
raise ValueError('Invalid symlink provided: %s' % symlink)
ebuild = os.path.realpath(symlink)
+ llvm_major_version = get_llvm_hash.GetLLVMMajorVersion(git_hash)
# llvm
package = os.path.basename(os.path.dirname(symlink))
if not package:
raise ValueError('Tried to uprev an unknown package')
- # llvm
if package == 'llvm':
new_ebuild, is_changed = re.subn(
- r'pre([0-9]+)_p([0-9]+)',
- 'pre%s_p%s' % (svn_version, \
- datetime.today().strftime('%Y%m%d')),
+ r'(\d+)\.(\d+)_pre([0-9]+)_p([0-9]+)',
+ '%s.\\2_pre%s_p%s' % (llvm_major_version, svn_version,
+ datetime.datetime.today().strftime('%Y%m%d')),
ebuild,
count=1)
# any other package
else:
- new_ebuild, is_changed = re.subn(
- r'pre([0-9]+)', 'pre%s' % svn_version, ebuild, count=1)
+ new_ebuild, is_changed = re.subn(r'(\d+)\.(\d+)_pre([0-9]+)',
+ '%s.\\2_pre%s' %
+ (llvm_major_version, svn_version),
+ ebuild,
+ count=1)
if not is_changed: # failed to increment the revision number
raise ValueError('Failed to uprev the ebuild.')
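
A worked example of the new llvm-package substitution, using made-up values that mirror the unit tests further down (major version '13', SVN revision 433403, a fixed date):

```
import re

ebuild = '/abs/path/to/llvm/llvm-12.0_pre3_p2.ebuild'
# Same pattern and template as UprevEbuildToVersion above, with assumed values.
new_ebuild, is_changed = re.subn(
    r'(\d+)\.(\d+)_pre([0-9]+)_p([0-9]+)',
    '%s.\\2_pre%s_p%s' % ('13', 433403, '20211019'),
    ebuild,
    count=1)
assert is_changed == 1
assert new_ebuild == '/abs/path/to/llvm/llvm-13.0_pre433403_p20211019.ebuild'
```
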
@@ -374,13 +399,14 @@
"""
if not os.path.isfile(patch_metadata_file_path):
- raise ValueError(
- 'Invalid patch metadata file provided: %s' % patch_metadata_file_path)
+ raise ValueError('Invalid patch metadata file provided: %s' %
+ patch_metadata_file_path)
# Cmd to stage the patch metadata file for commit.
subprocess.check_output([
'git', '-C',
- os.path.dirname(patch_metadata_file_path), 'add', patch_metadata_file_path
+ os.path.dirname(patch_metadata_file_path), 'add',
+ patch_metadata_file_path
])
@@ -393,15 +419,18 @@
package (key).
commit_messages: The commit message that has the updated ebuilds and
upreving information.
+
+ Returns:
+ commit_messages with new additions
"""
# For each package, check if any patches for that package have
# changed, if so, add which patches have changed to the commit
# message.
for package_name, patch_info_dict in package_info_dict.items():
- if patch_info_dict['disabled_patches'] or \
- patch_info_dict['removed_patches'] or \
- patch_info_dict['modified_metadata']:
+ if (patch_info_dict['disabled_patches']
+ or patch_info_dict['removed_patches']
+ or patch_info_dict['modified_metadata']):
cur_package_header = '\nFor the package %s:' % package_name
commit_messages.append(cur_package_header)
@@ -450,9 +479,9 @@
the patches and its metadata.
mode: The mode of the patch manager when handling an applicable patch
that failed to apply.
- Ex: 'FailureModes.FAIL'
+ Ex. 'FailureModes.FAIL'
git_hash_source: The source of which git hash to use based off of.
- Ex: 'google3', 'tot', or <version> such as 365123
+ Ex. 'google3', 'tot', or <version> such as 365123
extra_commit_msg: extra test to append to the commit message.
Returns:
@@ -478,11 +507,11 @@
if llvm_variant == LLVMVariant.next:
commit_message_header = 'llvm-next'
if git_hash_source in get_llvm_hash.KNOWN_HASH_SOURCES:
- commit_message_header += (
- '/%s: upgrade to %s (r%d)' % (git_hash_source, git_hash, svn_version))
+ commit_message_header += ('/%s: upgrade to %s (r%d)' %
+ (git_hash_source, git_hash, svn_version))
else:
- commit_message_header += (
- ': upgrade to %s (r%d)' % (git_hash, svn_version))
+ commit_message_header += (': upgrade to %s (r%d)' %
+ (git_hash, svn_version))
commit_messages = [
commit_message_header + '\n',
@@ -504,7 +533,7 @@
UpdateEbuildLLVMHash(ebuild_path, llvm_variant, git_hash, svn_version)
if llvm_variant == LLVMVariant.current:
- UprevEbuildToVersion(symlink_path, svn_version)
+ UprevEbuildToVersion(symlink_path, svn_version, git_hash)
else:
UprevEbuildSymlink(symlink_path)
@@ -514,6 +543,8 @@
packages.append('%s/%s' % (parent_dir_name, cur_dir_name))
commit_messages.append('%s/%s' % (parent_dir_name, cur_dir_name))
+ EnsurePackageMaskContains(chroot_path, git_hash)
+
# Handle the patches for each package.
package_info_dict = llvm_patch_management.UpdatePackagesPatchMetadataFile(
chroot_path, svn_version, patch_metadata_file, packages, mode)
@@ -533,6 +564,31 @@
return change_list
+def EnsurePackageMaskContains(chroot_path, git_hash):
+ """Adds the major version of llvm to package.mask if it's not already present.
+
+ Args:
+ chroot_path: The absolute path to the chroot.
+ git_hash: The new git hash.
+
+ Raises:
+ FileExistsError: package.mask not found in ../../chromiumos-overlay
+ """
+
+ llvm_major_version = get_llvm_hash.GetLLVMMajorVersion(git_hash)
+
+ overlay_dir = os.path.join(chroot_path, 'src/third_party/chromiumos-overlay')
+ mask_path = os.path.join(overlay_dir,
+ 'profiles/targets/chromeos/package.mask')
+ with open(mask_path, 'r+') as mask_file:
+ mask_contents = mask_file.read()
+ expected_line = '=sys-devel/llvm-%s.0_pre*\n' % llvm_major_version
+ if expected_line not in mask_contents:
+ mask_file.write(expected_line)
+
+ subprocess.check_output(['git', '-C', overlay_dir, 'add', mask_path])
+
+
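
For reference, the mask entry that EnsurePackageMaskContains appends has this shape; the major version here is an assumed example:

```
llvm_major_version = '13'  # assumed for illustration
expected_line = '=sys-devel/llvm-%s.0_pre*\n' % llvm_major_version
assert expected_line == '=sys-devel/llvm-13.0_pre*\n'
```
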
def main():
"""Updates the LLVM next hash for each package.
@@ -553,16 +609,16 @@
git_hash, svn_version = get_llvm_hash.GetLLVMHashAndVersionFromSVNOption(
git_hash_source)
- change_list = UpdatePackages(
- args_output.update_packages,
- llvm_variant,
- git_hash,
- svn_version,
- args_output.chroot_path,
- args_output.patch_metadata_file,
- FailureModes(args_output.failure_mode),
- git_hash_source,
- extra_commit_msg=None)
+ change_list = UpdatePackages(args_output.update_packages,
+ llvm_variant,
+ git_hash,
+ svn_version,
+ args_output.chroot_path,
+ args_output.patch_metadata_file,
+ failure_modes.FailureModes(
+ args_output.failure_mode),
+ git_hash_source,
+ extra_commit_msg=None)
print('Successfully updated packages to %s (%d)' % (git_hash, svn_version))
print('Gerrit URL: %s' % change_list.url)
diff --git a/llvm_tools/update_chromeos_llvm_hash_unittest.py b/llvm_tools/update_chromeos_llvm_hash_unittest.py
index 205feb0..adb2059 100755
--- a/llvm_tools/update_chromeos_llvm_hash_unittest.py
+++ b/llvm_tools/update_chromeos_llvm_hash_unittest.py
@@ -8,8 +8,8 @@
from __future__ import print_function
-from collections import namedtuple
-from datetime import datetime
+import collections
+import datetime
import os
import re
import subprocess
@@ -18,6 +18,7 @@
import chroot
import failure_modes
+import get_llvm_hash
import git
import llvm_patch_management
import test_helpers
@@ -30,6 +31,19 @@
class UpdateLLVMHashTest(unittest.TestCase):
"""Test class for updating LLVM hashes of packages."""
+ @mock.patch.object(os.path, 'realpath')
+ def testDefaultCrosRootFromCrOSCheckout(self, mock_llvm_tools):
+ llvm_tools_path = '/path/to/cros/src/third_party/toolchain-utils/llvm_tools'
+ mock_llvm_tools.return_value = llvm_tools_path
+ self.assertEqual(update_chromeos_llvm_hash.defaultCrosRoot(),
+ '%s/../../../../' % llvm_tools_path)
+
+ @mock.patch.object(os.path, 'realpath')
+ def testDefaultCrosRootFromOutsideCrOSCheckout(self, mock_llvm_tools):
+ mock_llvm_tools.return_value = '~/toolchain-utils/llvm_tools'
+ self.assertEqual(update_chromeos_llvm_hash.defaultCrosRoot(),
+ '~/chromiumos')
+
# Simulate behavior of 'os.path.isfile()' when the ebuild path to a package
# does not exist.
@mock.patch.object(os.path, 'isfile', return_value=False)
@@ -67,11 +81,11 @@
# Verify the exception is raised when the ebuild file does not have
# 'LLVM_HASH'.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UpdateEbuildLLVMHash(
- ebuild_file, llvm_variant, git_hash, svn_version)
+ update_chromeos_llvm_hash.UpdateEbuildLLVMHash(ebuild_file,
+ llvm_variant, git_hash,
+ svn_version)
- self.assertEqual(
- str(err.exception), ('Failed to update %s.', 'LLVM_HASH'))
+ self.assertEqual(str(err.exception), 'Failed to update LLVM_HASH')
llvm_variant = update_chromeos_llvm_hash.LLVMVariant.next
@@ -95,11 +109,11 @@
# Verify the exception is raised when the ebuild file does not have
# 'LLVM_NEXT_HASH'.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UpdateEbuildLLVMHash(
- ebuild_file, llvm_variant, git_hash, svn_version)
+ update_chromeos_llvm_hash.UpdateEbuildLLVMHash(ebuild_file,
+ llvm_variant, git_hash,
+ svn_version)
- self.assertEqual(
- str(err.exception), ('Failed to update %s.', 'LLVM_NEXT_HASH'))
+ self.assertEqual(str(err.exception), 'Failed to update LLVM_NEXT_HASH')
self.assertEqual(mock_isfile.call_count, 2)
@@ -179,19 +193,25 @@
mock_stage_commit_command.assert_called_once()
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(os.path, 'islink', return_value=False)
- def testFailedToUprevEbuildToVersionForInvalidSymlink(self, mock_islink):
+ def testFailedToUprevEbuildToVersionForInvalidSymlink(self, mock_islink,
+ mock_llvm_version):
symlink_path = '/path/to/chroot/package/package.ebuild'
svn_version = 1000
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
    # Verify the exception is raised when an invalid symbolic link is passed in.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version,
+ git_hash)
self.assertEqual(
str(err.exception), 'Invalid symlink provided: %s' % symlink_path)
mock_islink.assert_called_once()
+ mock_llvm_version.assert_not_called()
@mock.patch.object(os.path, 'islink', return_value=False)
def testFailedToUprevEbuildSymlinkForInvalidSymlink(self, mock_islink):
@@ -206,22 +226,28 @@
mock_islink.assert_called_once()
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
# Simulate 'os.path.islink' when a symbolic link is passed in.
@mock.patch.object(os.path, 'islink', return_value=True)
# Simulate 'os.path.realpath' when a symbolic link is passed in.
@mock.patch.object(os.path, 'realpath', return_value=True)
- def testFailedToUprevEbuildToVersion(self, mock_realpath, mock_islink):
+ def testFailedToUprevEbuildToVersion(self, mock_realpath, mock_islink,
+ mock_llvm_version):
symlink_path = '/path/to/chroot/llvm/llvm_pre123_p.ebuild'
mock_realpath.return_value = '/abs/path/to/llvm/llvm_pre123_p.ebuild'
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
svn_version = 1000
# Verify the exception is raised when the symlink does not match the
# expected pattern
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink_path, svn_version,
+ git_hash)
self.assertEqual(str(err.exception), 'Failed to uprev the ebuild.')
+ mock_llvm_version.assert_called_once_with(git_hash)
mock_islink.assert_called_once_with(symlink_path)
# Simulate 'os.path.islink' when a symbolic link is passed in.
@@ -238,17 +264,24 @@
mock_islink.assert_called_once_with(symlink_path)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(os.path, 'islink', return_value=True)
@mock.patch.object(os.path, 'realpath')
@mock.patch.object(subprocess, 'check_output', return_value=None)
def testSuccessfullyUprevEbuildToVersionLLVM(self, mock_command_output,
- mock_realpath, mock_islink):
- symlink = '/path/to/llvm/llvm_pre3_p2-r10.ebuild'
- ebuild = '/abs/path/to/llvm/llvm_pre3_p2.ebuild'
+ mock_realpath, mock_islink,
+ mock_llvm_version):
+ symlink = '/path/to/llvm/llvm-12.0_pre3_p2-r10.ebuild'
+ ebuild = '/abs/path/to/llvm/llvm-12.0_pre3_p2.ebuild'
mock_realpath.return_value = ebuild
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
svn_version = 1000
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version,
+ git_hash)
+
+ mock_llvm_version.assert_called_once_with(git_hash)
mock_islink.assert_called()
@@ -258,12 +291,8 @@
# Verify commands
symlink_dir = os.path.dirname(symlink)
- new_ebuild, _is_changed = re.subn(
- r'pre([0-9]+)_p([0-9]+)',
- 'pre%s_p%s' % (svn_version, \
- datetime.today().strftime('%Y%m%d')),
- ebuild,
- count=1)
+ timestamp = datetime.datetime.today().strftime('%Y%m%d')
+ new_ebuild = '/abs/path/to/llvm/llvm-1234.0_pre1000_p%s.ebuild' % timestamp
new_symlink = new_ebuild[:-len('.ebuild')] + '-r1.ebuild'
expected_cmd = ['git', '-C', symlink_dir, 'mv', ebuild, new_ebuild]
@@ -282,28 +311,34 @@
self.assertEqual(mock_command_output.call_args_list[3],
mock.call(expected_cmd))
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(os.path, 'islink', return_value=True)
@mock.patch.object(os.path, 'realpath')
@mock.patch.object(subprocess, 'check_output', return_value=None)
def testSuccessfullyUprevEbuildToVersionNonLLVM(self, mock_command_output,
- mock_realpath, mock_islink):
- symlink = '/path/to/compiler-rt/compiler-rt_pre3_p2-r10.ebuild'
- ebuild = '/abs/path/to/compiler-rt/compiler-rt_pre3_p2.ebuild'
+ mock_realpath, mock_islink,
+ mock_llvm_version):
+ symlink = '/abs/path/to/compiler-rt/compiler-rt-12.0_pre314159265-r4.ebuild'
+ ebuild = '/abs/path/to/compiler-rt/compiler-rt-12.0_pre314159265.ebuild'
mock_realpath.return_value = ebuild
+ mock_llvm_version.return_value = '1234'
svn_version = 1000
+ git_hash = '5678'
- update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version)
+ update_chromeos_llvm_hash.UprevEbuildToVersion(symlink, svn_version,
+ git_hash)
mock_islink.assert_called()
mock_realpath.assert_called_once_with(symlink)
+ mock_llvm_version.assert_called_once_with(git_hash)
+
mock_command_output.assert_called()
# Verify commands
symlink_dir = os.path.dirname(symlink)
- new_ebuild, _is_changed = re.subn(
- r'pre([0-9]+)', 'pre%s' % svn_version, ebuild, count=1)
+ new_ebuild = '/abs/path/to/compiler-rt/compiler-rt-1234.0_pre1000.ebuild'
new_symlink = new_ebuild[:-len('.ebuild')] + '-r1.ebuild'
expected_cmd = ['git', '-C', symlink_dir, 'mv', ebuild, new_ebuild]
@@ -350,7 +385,8 @@
# Test function to simulate 'ConvertChrootPathsToAbsolutePaths' when a
# symlink does not start with the prefix '/mnt/host/source'.
- def BadPrefixChrootPath(_chroot_path, _chroot_file_paths):
+ def BadPrefixChrootPath(*args):
+ assert len(args) == 2
raise ValueError('Invalid prefix for the chroot path: '
'%s' % package_chroot_path)
@@ -458,8 +494,7 @@
@mock.patch.object(os.path, 'isfile', return_value=True)
@mock.patch.object(subprocess, 'check_output', return_value=None)
- def testSuccessfullyStagedPatchMetadataFileForCommit(self, mock_run_cmd,
- _mock_isfile):
+ def testSuccessfullyStagedPatchMetadataFileForCommit(self, mock_run_cmd, _):
patch_metadata_path = '/abs/path/to/filesdir/PATCHES.json'
@@ -549,6 +584,7 @@
self.assertEqual(mock_stage_patches_for_commit.call_count, 2)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(update_chromeos_llvm_hash,
'CreatePathDictionaryFromPackages')
@mock.patch.object(git, 'CreateBranch')
@@ -560,20 +596,19 @@
def testExceptionRaisedWhenUpdatingPackages(
self, mock_realpath, mock_delete_repo, mock_upload_changes,
mock_uprev_symlink, mock_update_llvm_next, mock_create_repo,
- mock_create_path_dict):
-
- abs_path_to_package = '/some/path/to/chroot/src/path/to/package.ebuild'
-
- symlink_path_to_package = \
- '/some/path/to/chroot/src/path/to/package-r1.ebuild'
+ mock_create_path_dict, mock_llvm_major_version):
path_to_package_dir = '/some/path/to/chroot/src/path/to'
+ abs_path_to_package = os.path.join(path_to_package_dir, 'package.ebuild')
+ symlink_path_to_package = os.path.join(path_to_package_dir,
+ 'package-r1.ebuild')
+
+ mock_llvm_major_version.return_value = '1234'
# Test function to simulate 'CreateBranch' when successfully created the
# branch on a valid repo path.
- def SuccessfullyCreateBranchForChanges(_repo_path, branch):
+ def SuccessfullyCreateBranchForChanges(_, branch):
self.assertEqual(branch, 'update-LLVM_NEXT_HASH-a123testhash4')
- return
# Test function to simulate 'UpdateEbuildLLVMHash' when successfully
# updated the ebuild's 'LLVM_NEXT_HASH'.
@@ -581,23 +616,23 @@
self.assertEqual(ebuild_path, abs_path_to_package)
self.assertEqual(git_hash, 'a123testhash4')
self.assertEqual(svn_version, 1000)
- return
# Test function to simulate 'UprevEbuildSymlink' when the symlink to the
# ebuild does not have a revision number.
- def FailedToUprevEbuildSymlink(_symlink_path):
+ def FailedToUprevEbuildSymlink(_):
      # Raises a 'ValueError' exception because the symlink did not have a
# revision number.
raise ValueError('Failed to uprev the ebuild.')
# Test function to fail on 'UploadChanges' if the function gets called
# when an exception is raised.
- def ShouldNotExecuteUploadChanges(_repo_path, _git_hash, _commit_messages):
+ def ShouldNotExecuteUploadChanges(*args):
# Test function should not be called (i.e. execution should resume in the
# 'finally' block) because 'UprevEbuildSymlink' raised an
# exception.
- assert False, 'Failed to go to "finally" block ' \
- 'after the exception was raised.'
+ assert len(args) == 3
+ assert False, ('Failed to go to "finally" block '
+ 'after the exception was raised.')
test_package_path_dict = {symlink_path_to_package: abs_path_to_package}
@@ -627,10 +662,12 @@
# Verify exception is raised when an exception is thrown within
# the 'try' block by UprevEbuildSymlink function.
with self.assertRaises(ValueError) as err:
- update_chromeos_llvm_hash.UpdatePackages(
- packages_to_update, llvm_variant, git_hash, svn_version, chroot_path,
- patch_metadata_file, failure_modes.FailureModes.FAIL, git_hash_source,
- extra_commit_msg)
+ update_chromeos_llvm_hash.UpdatePackages(packages_to_update, llvm_variant,
+ git_hash, svn_version,
+ chroot_path, patch_metadata_file,
+ failure_modes.FailureModes.FAIL,
+ git_hash_source,
+ extra_commit_msg)
self.assertEqual(str(err.exception), 'Failed to uprev the ebuild.')
@@ -639,8 +676,9 @@
mock_create_repo.assert_called_once_with(path_to_package_dir, branch)
- mock_update_llvm_next.assert_called_once_with(
- abs_path_to_package, llvm_variant, git_hash, svn_version)
+ mock_update_llvm_next.assert_called_once_with(abs_path_to_package,
+ llvm_variant, git_hash,
+ svn_version)
mock_uprev_symlink.assert_called_once_with(symlink_path_to_package)
@@ -648,6 +686,8 @@
mock_delete_repo.assert_called_once_with(path_to_package_dir, branch)
+ @mock.patch.object(update_chromeos_llvm_hash, 'EnsurePackageMaskContains')
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
@mock.patch.object(update_chromeos_llvm_hash,
'CreatePathDictionaryFromPackages')
@mock.patch.object(git, 'CreateBranch')
@@ -658,23 +698,22 @@
@mock.patch.object(llvm_patch_management, 'UpdatePackagesPatchMetadataFile')
@mock.patch.object(update_chromeos_llvm_hash,
'StagePatchMetadataFileForCommit')
- def testSuccessfullyUpdatedPackages(
- self, mock_stage_patch_file, mock_update_package_metadata_file,
- mock_delete_repo, mock_upload_changes, mock_uprev_symlink,
- mock_update_llvm_next, mock_create_repo, mock_create_path_dict):
-
- abs_path_to_package = '/some/path/to/chroot/src/path/to/package.ebuild'
-
- symlink_path_to_package = \
- '/some/path/to/chroot/src/path/to/package-r1.ebuild'
+ def testSuccessfullyUpdatedPackages(self, mock_stage_patch_file,
+ mock_update_package_metadata_file,
+ mock_delete_repo, mock_upload_changes,
+ mock_uprev_symlink, mock_update_llvm_next,
+ mock_create_repo, mock_create_path_dict,
+ mock_llvm_version, mock_mask_contains):
path_to_package_dir = '/some/path/to/chroot/src/path/to'
+ abs_path_to_package = os.path.join(path_to_package_dir, 'package.ebuild')
+ symlink_path_to_package = os.path.join(path_to_package_dir,
+ 'package-r1.ebuild')
# Test function to simulate 'CreateBranch' when successfully created the
# branch for the changes to be made to the ebuild files.
- def SuccessfullyCreateBranchForChanges(_repo_path, branch):
+ def SuccessfullyCreateBranchForChanges(_, branch):
self.assertEqual(branch, 'update-LLVM_NEXT_HASH-a123testhash5')
- return
    # Test function to simulate 'UploadChanges' after a successful update of
# 'LLVM_NEXT_HASH" of the ebuild file.
@@ -683,7 +722,6 @@
'/some/path/to/chroot/src/path/to/package.ebuild')
self.assertEqual(git_hash, 'a123testhash5')
self.assertEqual(svn_version, 1000)
- return
# Test function to simulate 'UprevEbuildSymlink' when successfully
# incremented the revision number by 1.
@@ -691,8 +729,6 @@
self.assertEqual(symlink_path,
'/some/path/to/chroot/src/path/to/package-r1.ebuild')
- return
-
# Test function to simulate 'UpdatePackagesPatchMetadataFile()' when the
# patch results contains a disabled patch in 'disable_patches' mode.
def RetrievedPatchResults(chroot_path, svn_version, patch_metadata_file,
@@ -704,7 +740,7 @@
self.assertListEqual(packages, ['path/to'])
self.assertEqual(mode, failure_modes.FailureModes.DISABLE_PATCHES)
- PatchInfo = namedtuple('PatchInfo', [
+ PatchInfo = collections.namedtuple('PatchInfo', [
'applied_patches', 'failed_patches', 'non_applicable_patches',
'disabled_patches', 'removed_patches', 'modified_metadata'
])
@@ -727,9 +763,9 @@
# Test function to simulate 'UploadChanges()' when successfully created a
# commit for the changes made to the packages and their patches and
# retrieved the change list of the commit.
- def SuccessfullyUploadedChanges(_repo_path, _branch, _commit_messages):
+ def SuccessfullyUploadedChanges(*args):
+ assert len(args) == 3
commit_url = 'https://some_name/path/to/commit/+/12345'
-
return git.CommitContents(url=commit_url, cl_number=12345)
test_package_path_dict = {symlink_path_to_package: abs_path_to_package}
@@ -746,6 +782,8 @@
mock_uprev_symlink.side_effect = SuccessfullyUprevedEbuildSymlink
mock_update_package_metadata_file.side_effect = RetrievedPatchResults
mock_upload_changes.side_effect = SuccessfullyUploadedChanges
+ mock_llvm_version.return_value = '1234'
+    mock_mask_contains.return_value = None
packages_to_update = ['test-packages/package1']
llvm_variant = update_chromeos_llvm_hash.LLVMVariant.next
@@ -772,11 +810,14 @@
mock_create_repo.assert_called_once_with(path_to_package_dir, branch)
- mock_update_llvm_next.assert_called_once_with(
- abs_path_to_package, llvm_variant, git_hash, svn_version)
+ mock_update_llvm_next.assert_called_once_with(abs_path_to_package,
+ llvm_variant, git_hash,
+ svn_version)
mock_uprev_symlink.assert_called_once_with(symlink_path_to_package)
+ mock_mask_contains.assert_called_once_with(chroot_path, git_hash)
+
expected_commit_messages = [
'llvm-next/tot: upgrade to a123testhash5 (r1000)\n',
'The following packages have been updated:', 'path/to',
@@ -796,6 +837,48 @@
mock_delete_repo.assert_called_once_with(path_to_package_dir, branch)
+ @mock.patch.object(subprocess, 'check_output', return_value=None)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
+ def testEnsurePackageMaskContainsExisting(self, mock_llvm_version,
+ mock_git_add):
+ chroot_path = 'absolute/path/to/chroot'
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
+ with mock.patch(
+ 'update_chromeos_llvm_hash.open',
+ mock.mock_open(read_data='\n=sys-devel/llvm-1234.0_pre*\n'),
+ create=True) as mock_file:
+ update_chromeos_llvm_hash.EnsurePackageMaskContains(chroot_path, git_hash)
+ handle = mock_file()
+ handle.write.assert_not_called()
+ mock_llvm_version.assert_called_once_with(git_hash)
+
+ overlay_dir = 'absolute/path/to/chroot/src/third_party/chromiumos-overlay'
+ mask_path = overlay_dir + '/profiles/targets/chromeos/package.mask'
+ mock_git_add.assert_called_once_with(
+ ['git', '-C', overlay_dir, 'add', mask_path])
+
+ @mock.patch.object(subprocess, 'check_output', return_value=None)
+ @mock.patch.object(get_llvm_hash, 'GetLLVMMajorVersion')
+ def testEnsurePackageMaskContainsNotExisting(self, mock_llvm_version,
+ mock_git_add):
+ chroot_path = 'absolute/path/to/chroot'
+ git_hash = 'badf00d'
+ mock_llvm_version.return_value = '1234'
+ with mock.patch(
+ 'update_chromeos_llvm_hash.open',
+ mock.mock_open(read_data='nothing relevant'),
+ create=True) as mock_file:
+ update_chromeos_llvm_hash.EnsurePackageMaskContains(chroot_path, git_hash)
+ handle = mock_file()
+ handle.write.assert_called_once_with('=sys-devel/llvm-1234.0_pre*\n')
+ mock_llvm_version.assert_called_once_with(git_hash)
+
+ overlay_dir = 'absolute/path/to/chroot/src/third_party/chromiumos-overlay'
+ mask_path = overlay_dir + '/profiles/targets/chromeos/package.mask'
+ mock_git_add.assert_called_once_with(
+ ['git', '-C', overlay_dir, 'add', mask_path])
+
if __name__ == '__main__':
unittest.main()
diff --git a/llvm_tools/update_packages_and_run_tests.py b/llvm_tools/update_packages_and_run_tests.py
index b54ba65..2e4a905 100755
--- a/llvm_tools/update_packages_and_run_tests.py
+++ b/llvm_tools/update_packages_and_run_tests.py
@@ -51,10 +51,9 @@
'of updating the packages')
# Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
+ parser.add_argument('--chroot_path',
+ default=cros_root,
+ help='the path to the chroot (default: %(default)s)')
# Add argument to choose between llvm and llvm-next.
parser.add_argument(
@@ -71,65 +70,58 @@
'arguments.')
# Add argument for the LLVM version to use.
- parser.add_argument(
- '--llvm_version',
- type=get_llvm_hash.is_svn_option,
- required=True,
- help='which git hash of LLVM to find '
- '{google3, ToT, <svn_version>} '
- '(default: finds the git hash of the google3 LLVM '
- 'version)')
+ parser.add_argument('--llvm_version',
+ type=get_llvm_hash.IsSvnOption,
+ required=True,
+ help='which git hash of LLVM to find '
+ '{google3, ToT, <svn_version>} '
+ '(default: finds the git hash of the google3 LLVM '
+ 'version)')
# Add argument to add reviewers for the created CL.
- parser.add_argument(
- '--reviewers',
- nargs='+',
- default=[],
- help='The reviewers for the package update changelist')
+ parser.add_argument('--reviewers',
+ nargs='+',
+ default=[],
+ help='The reviewers for the package update changelist')
# Add argument for whether to display command contents to `stdout`.
- parser.add_argument(
- '--verbose',
- action='store_true',
- help='display contents of a command to the terminal '
- '(default: %(default)s)')
+ parser.add_argument('--verbose',
+ action='store_true',
+ help='display contents of a command to the terminal '
+ '(default: %(default)s)')
subparsers = parser.add_subparsers(dest='subparser_name')
subparser_names = []
# Testing with the tryjobs.
tryjob_subparser = subparsers.add_parser('tryjobs')
subparser_names.append('tryjobs')
- tryjob_subparser.add_argument(
- '--builders',
- required=True,
- nargs='+',
- default=[],
- help='builders to use for the tryjob testing')
+ tryjob_subparser.add_argument('--builders',
+ required=True,
+ nargs='+',
+ default=[],
+ help='builders to use for the tryjob testing')
# Add argument for custom options for the tryjob.
- tryjob_subparser.add_argument(
- '--options',
- required=False,
- nargs='+',
- default=[],
- help='options to use for the tryjob testing')
+ tryjob_subparser.add_argument('--options',
+ required=False,
+ nargs='+',
+ default=[],
+ help='options to use for the tryjob testing')
# Testing with the recipe builders
recipe_subparser = subparsers.add_parser('recipe')
subparser_names.append('recipe')
- recipe_subparser.add_argument(
- '--options',
- required=False,
- nargs='+',
- default=[],
- help='options passed to the recipe builders')
+ recipe_subparser.add_argument('--options',
+ required=False,
+ nargs='+',
+ default=[],
+ help='options passed to the recipe builders')
- recipe_subparser.add_argument(
- '--builders',
- required=True,
- nargs='+',
- default=[],
- help='recipe builders to launch')
+ recipe_subparser.add_argument('--builders',
+ required=True,
+ nargs='+',
+ default=[],
+ help='recipe builders to launch')
# Testing with CQ.
cq_subparser = subparsers.add_parser('cq')
@@ -276,10 +268,12 @@
test_output = json.loads(out)
+ buildbucket_id = int(test_output[0]['id'])
+
tests.append({
'launch_time': str(GetCurrentTimeInUTC()),
- 'link': str(test_output[0]['url']),
- 'buildbucket_id': int(test_output[0]['buildbucket_id']),
+ 'link': 'http://ci.chromium.org/b/%s' % buildbucket_id,
+ 'buildbucket_id': buildbucket_id,
'extra_cls': extra_change_lists,
'options': options,
'builder': [builder]
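
The tryjob record now derives its link from the buildbucket `id` field rather than a reported URL. A small sketch with an assumed JSON response:

```
import json

out = json.dumps([{'id': '8878578437860011697'}])  # assumed response shape
test_output = json.loads(out)
buildbucket_id = int(test_output[0]['id'])
link = 'http://ci.chromium.org/b/%s' % buildbucket_id
assert link == 'http://ci.chromium.org/b/8878578437860011697'
```
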
@@ -358,7 +352,8 @@
return None
# Cq-Depend must start a new paragraph prefixed with "Cq-Depend".
- return '\nCq-Depend: ' + ', '.join(('chromium:%s' % i) for i in dependent_cls)
+ return '\nCq-Depend: ' + ', '.join(
+ ('chromium:%s' % i) for i in dependent_cls)
def GetCQIncludeTrybotsString(trybot):
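
The reformatted return above produces a footer of this form; the CL numbers are made up:

```
dependent_cls = [123, 456]  # made-up CL numbers
footer = '\nCq-Depend: ' + ', '.join(
    ('chromium:%s' % i) for i in dependent_cls)
assert footer == '\nCq-Depend: chromium:123, chromium:456'
```
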
@@ -400,11 +395,6 @@
args_output = GetCommandLineArgs()
- update_packages = [
- 'sys-devel/llvm', 'sys-libs/compiler-rt', 'sys-libs/libcxx',
- 'sys-libs/libcxxabi', 'sys-libs/llvm-libunwind'
- ]
-
patch_metadata_file = 'PATCHES.json'
svn_option = args_output.llvm_version
@@ -418,8 +408,8 @@
# If --last_tested is specified, check if the current run has the same
# arguments last time --last_tested is used.
if args_output.last_tested:
- chroot_file_paths = chroot.GetChrootEbuildPaths(args_output.chroot_path,
- update_packages)
+ chroot_file_paths = chroot.GetChrootEbuildPaths(
+ args_output.chroot_path, update_chromeos_llvm_hash.DEFAULT_PACKAGES)
arg_dict = {
'svn_version': svn_version,
'ebuilds': chroot_file_paths,
@@ -447,7 +437,7 @@
extra_commit_msg += cq_trybot_msg
change_list = update_chromeos_llvm_hash.UpdatePackages(
- update_packages,
+ update_chromeos_llvm_hash.DEFAULT_PACKAGES,
llvm_variant,
git_hash,
svn_version,
@@ -472,9 +462,10 @@
for test in tests:
print(test)
elif args_output.subparser_name == 'recipe':
- tests = StartRecipeBuilders(
- change_list.cl_number, args_output.extra_change_lists,
- args_output.options, args_output.builders, args_output.chroot_path)
+ tests = StartRecipeBuilders(change_list.cl_number,
+ args_output.extra_change_lists,
+ args_output.options, args_output.builders,
+ args_output.chroot_path)
print('Tests:')
for test in tests:
print(test)
diff --git a/llvm_tools/update_packages_and_run_tests_unittest.py b/llvm_tools/update_packages_and_run_tests_unittest.py
index d852893..11f2b7f 100755
--- a/llvm_tools/update_packages_and_run_tests_unittest.py
+++ b/llvm_tools/update_packages_and_run_tests_unittest.py
@@ -46,8 +46,7 @@
def testMatchedLastTestedFile(self):
with test_helpers.CreateTemporaryFile() as last_tested_file:
arg_dict = {
- 'svn_version':
- 1234,
+ 'svn_version': 1234,
'ebuilds': [
'/path/to/package1-r2.ebuild',
'/path/to/package2/package2-r3.ebuild'
@@ -104,8 +103,9 @@
]
self.assertEqual(
- update_packages_and_run_tests.GetTryJobCommand(
- change_list, extra_cls, options, builder), expected_cmd)
+ update_packages_and_run_tests.GetTryJobCommand(change_list, extra_cls,
+ options, builder),
+ expected_cmd)
@mock.patch.object(
update_packages_and_run_tests,
@@ -123,9 +123,9 @@
]
bb_id = '1234'
- url = 'https://some_tryjob_url.com'
+ url = 'http://ci.chromium.org/b/%s' % bb_id
- mock_cmd.return_value = json.dumps([{'buildbucket_id': bb_id, 'url': url}])
+ mock_cmd.return_value = json.dumps([{'id': bb_id, 'url': url}])
chroot_path = '/some/path/to/chroot'
cl = 900
diff --git a/llvm_tools/update_tryjob_status.py b/llvm_tools/update_tryjob_status.py
index f150036..f25fadc 100755
--- a/llvm_tools/update_tryjob_status.py
+++ b/llvm_tools/update_tryjob_status.py
@@ -16,7 +16,6 @@
import sys
import chroot
-from subprocess_helpers import ChrootRunCommand
from test_helpers import CreateTemporaryJsonFile
@@ -32,17 +31,6 @@
# determines the 'status' value of the tryjob).
CUSTOM_SCRIPT = 'custom_script'
- # Uses the result returned by 'cros buildresult'.
- AUTO = 'auto'
-
-
-class BuilderStatus(enum.Enum):
- """Actual values given via 'cros buildresult'."""
-
- PASS = 'pass'
- FAIL = 'fail'
- RUNNING = 'running'
-
class CustomScriptStatus(enum.Enum):
"""Exit code values of a custom script."""
@@ -66,12 +54,6 @@
CustomScriptStatus.SKIP.value: TryjobStatus.SKIP.value
}
-builder_status_mapping = {
- BuilderStatus.PASS.value: TryjobStatus.GOOD.value,
- BuilderStatus.FAIL.value: TryjobStatus.BAD.value,
- BuilderStatus.RUNNING.value: TryjobStatus.PENDING.value
-}
-
def GetCommandLineArgs():
"""Parses the command line for the command line arguments."""
@@ -106,12 +88,6 @@
type=int,
help='The revision to set its status.')
- # Add argument for a specific chroot path.
- parser.add_argument(
- '--chroot_path',
- default=cros_root,
- help='the path to the chroot (default: %(default)s)')
-
# Add argument for the custom script to execute for the 'custom_script'
# option in '--set_status'.
parser.add_argument(
@@ -123,13 +99,14 @@
args_output = parser.parse_args()
- if not os.path.isfile(args_output.status_file) or \
- not args_output.status_file.endswith('.json'):
+  if (not os.path.isfile(args_output.status_file)
+      or not args_output.status_file.endswith('.json')):
raise ValueError('File does not exist or does not ending in ".json" '
': %s' % args_output.status_file)
- if args_output.set_status == TryjobStatus.CUSTOM_SCRIPT.value and \
- not args_output.custom_script:
+ if (args_output.set_status == TryjobStatus.CUSTOM_SCRIPT.value and
+ not args_output.custom_script):
raise ValueError('Please provide the absolute path to the script to '
'execute.')
@@ -165,35 +142,6 @@
return None
-def GetStatusFromCrosBuildResult(chroot_path, buildbucket_id):
- """Retrieves the 'status' using 'cros buildresult'."""
-
- get_buildbucket_id_cmd = [
- 'cros', 'buildresult', '--buildbucket-id',
- str(buildbucket_id), '--report', 'json'
- ]
-
- tryjob_json = ChrootRunCommand(chroot_path, get_buildbucket_id_cmd)
-
- tryjob_contents = json.loads(tryjob_json)
-
- return str(tryjob_contents['%d' % buildbucket_id]['status'])
-
-
-def GetAutoResult(chroot_path, buildbucket_id):
- """Returns the conversion of the result of 'cros buildresult'."""
-
- # Calls 'cros buildresult' to get the status of the tryjob.
- build_result = GetStatusFromCrosBuildResult(chroot_path, buildbucket_id)
-
- # The string returned by 'cros buildresult' might not be in the mapping.
- if build_result not in builder_status_mapping:
- raise ValueError(
- '"cros buildresult" return value is invalid: %s' % build_result)
-
- return builder_status_mapping[build_result]
-
-
def GetCustomScriptResult(custom_script, status_file, tryjob_contents):
"""Returns the conversion of the exit code of the custom script.
@@ -245,18 +193,15 @@
return custom_script_exit_value_mapping[exec_script_cmd_obj.returncode]
-def UpdateTryjobStatus(revision, set_status, status_file, chroot_path,
- custom_script):
+def UpdateTryjobStatus(revision, set_status, status_file, custom_script):
"""Updates a tryjob's 'status' field based off of 'set_status'.
Args:
revision: The revision associated with the tryjob.
set_status: What to update the 'status' field to.
Ex: TryjobStatus.Good, TryjobStatus.BAD, TryjobStatus.PENDING, or
- TryjobStatus.AUTO where TryjobStatus.AUTO uses the result of
- 'cros buildresult'.
+          TryjobStatus.SKIP.
status_file: The .JSON file that contains the tryjobs.
- chroot_path: The absolute path to the chroot (used by 'cros buildresult').
custom_script: The absolute path to a script that will be executed which
will determine the 'status' value of the tryjob.
"""
@@ -282,8 +227,8 @@
# 'FindTryjobIndex()' returns None if the revision was not found.
if tryjob_index is None:
- raise ValueError(
- 'Unable to find tryjob for %d in %s' % (revision, status_file))
+ raise ValueError('Unable to find tryjob for %d in %s' %
+ (revision, status_file))
# Set 'status' depending on 'set_status' for the tryjob.
if set_status == TryjobStatus.GOOD:
@@ -292,9 +237,6 @@
bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.BAD.value
elif set_status == TryjobStatus.PENDING:
bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.PENDING.value
- elif set_status == TryjobStatus.AUTO:
- bisect_contents['jobs'][tryjob_index]['status'] = GetAutoResult(
- chroot_path, bisect_contents['jobs'][tryjob_index]['buildbucket_id'])
elif set_status == TryjobStatus.SKIP:
bisect_contents['jobs'][tryjob_index]['status'] = TryjobStatus.SKIP.value
elif set_status == TryjobStatus.CUSTOM_SCRIPT:
@@ -315,8 +257,7 @@
args_output = GetCommandLineArgs()
UpdateTryjobStatus(args_output.revision, TryjobStatus(args_output.set_status),
- args_output.status_file, args_output.chroot_path,
- args_output.custom_script)
+ args_output.status_file, args_output.custom_script)
if __name__ == '__main__':
diff --git a/llvm_tools/update_tryjob_status_unittest.py b/llvm_tools/update_tryjob_status_unittest.py
index b5e6556..c42c671 100755
--- a/llvm_tools/update_tryjob_status_unittest.py
+++ b/llvm_tools/update_tryjob_status_unittest.py
@@ -31,14 +31,13 @@
'cl': 'https://some_link_to_tryjob.com',
'status': 'good',
'buildbucket_id': 91835
- },
- {
- 'rev': 1000,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'pending',
- 'buildbucket_id': 10931
- }]
+ }, {
+ 'rev': 1000,
+ 'url': 'https://some_url_to_CL.com',
+ 'cl': 'https://some_link_to_tryjob.com',
+ 'status': 'pending',
+ 'buildbucket_id': 10931
+ }]
expected_index = 0
@@ -55,87 +54,19 @@
'cl': 'https://some_link_to_tryjob.com',
'status': 'bad',
'buildbucket_id': 390
- },
- {
- 'rev': 10,
- 'url': 'https://some_url_to_CL.com',
- 'cl': 'https://some_link_to_tryjob.com',
- 'status': 'skip',
- 'buildbucket_id': 10
- }]
+ }, {
+ 'rev': 10,
+ 'url': 'https://some_url_to_CL.com',
+ 'cl': 'https://some_link_to_tryjob.com',
+ 'status': 'skip',
+ 'buildbucket_id': 10
+ }]
revision_to_find = 250
self.assertIsNone(
update_tryjob_status.FindTryjobIndex(revision_to_find, test_tryjobs))
- # Simulate the behavior of `ChrootRunCommand()` when executing a command
- # inside the chroot.
- @mock.patch.object(update_tryjob_status, 'ChrootRunCommand')
- def testGetStatusFromCrosBuildResult(self, mock_chroot_command):
- tryjob_contents = {
- '192': {
- 'status': 'good',
- 'CleanUpChroot': 'pass',
- 'artifacts_url': None
- }
- }
-
- # Use the test function to simulate 'ChrootRunCommand()' behavior.
- mock_chroot_command.return_value = json.dumps(tryjob_contents)
-
- buildbucket_id = 192
-
- chroot_path = '/some/path/to/chroot'
-
- self.assertEqual(
- update_tryjob_status.GetStatusFromCrosBuildResult(
- chroot_path, buildbucket_id), 'good')
-
- expected_cmd = [
- 'cros', 'buildresult', '--buildbucket-id',
- str(buildbucket_id), '--report', 'json'
- ]
-
- mock_chroot_command.assert_called_once_with(chroot_path, expected_cmd)
-
- # Simulate the behavior of `GetStatusFromCrosBuildResult()` when `cros
- # buildresult` returned a string that is not in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetStatusFromCrosBuildResult',
- return_value='querying')
- def testInvalidCrosBuildResultValue(self, mock_cros_buildresult):
- chroot_path = '/some/path/to/chroot'
- buildbucket_id = 50
-
- # Verify the exception is raised when the return value of `cros buildresult`
- # is not in the `builder_status_mapping`.
- with self.assertRaises(ValueError) as err:
- update_tryjob_status.GetAutoResult(chroot_path, buildbucket_id)
-
- self.assertEqual(
- str(err.exception),
- '"cros buildresult" return value is invalid: querying')
-
- mock_cros_buildresult.assert_called_once_with(chroot_path, buildbucket_id)
-
- # Simulate the behavior of `GetStatusFromCrosBuildResult()` when `cros
- # buildresult` returned a string that is in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetStatusFromCrosBuildResult',
- return_value=update_tryjob_status.BuilderStatus.PASS.value)
- def testValidCrosBuildResultValue(self, mock_cros_buildresult):
- chroot_path = '/some/path/to/chroot'
- buildbucket_id = 100
-
- self.assertEqual(
- update_tryjob_status.GetAutoResult(chroot_path, buildbucket_id),
- TryjobStatus.GOOD.value)
-
- mock_cros_buildresult.assert_called_once_with(chroot_path, buildbucket_id)
-
@mock.patch.object(subprocess, 'Popen')
# Simulate the behavior of `os.rename()` when successfully renamed a file.
@mock.patch.object(os, 'rename', return_value=None)
@@ -186,8 +117,9 @@
# does not match any of the exit codes in the mapping of
# `custom_script_exit_value_mapping`.
with self.assertRaises(ValueError) as err:
- update_tryjob_status.GetCustomScriptResult(
- custom_script_path, status_file_path, tryjob_contents)
+ update_tryjob_status.GetCustomScriptResult(custom_script_path,
+ status_file_path,
+ tryjob_contents)
self.assertEqual(str(err.exception), expected_error_message)
@@ -212,8 +144,8 @@
# `Popen.communicate()` returns a tuple of `stdout` and `stderr`.
mock_exec_custom_script.return_value.communicate.return_value = (None, None)
- mock_exec_custom_script.return_value.returncode = \
- CustomScriptStatus.GOOD.value
+ mock_exec_custom_script.return_value.returncode = (
+ CustomScriptStatus.GOOD.value)
tryjob_contents = {
'status': 'good',
@@ -226,8 +158,9 @@
status_file_path = '/abs/path/to/status_file.json'
self.assertEqual(
- update_tryjob_status.GetCustomScriptResult(
- custom_script_path, status_file_path, tryjob_contents),
+ update_tryjob_status.GetCustomScriptResult(custom_script_path,
+ status_file_path,
+ tryjob_contents),
TryjobStatus.GOOD.value)
mock_exec_custom_script.assert_called_once()
@@ -247,16 +180,14 @@
revision_to_update = 369412
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
# Verify the exception is raised when the `status_file` does not have any
# `jobs` (empty).
with self.assertRaises(SystemExit) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, TryjobStatus.GOOD, temp_json_file, chroot_path,
- custom_script)
+ update_tryjob_status.UpdateTryjobStatus(revision_to_update,
+ TryjobStatus.GOOD,
+ temp_json_file, custom_script)
self.assertEqual(str(err.exception), 'No tryjobs in %s' % temp_json_file)
@@ -283,16 +214,14 @@
revision_to_update = 369416
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
# Verify the exception is raised when the `status_file` does not have any
# `jobs` (empty).
with self.assertRaises(ValueError) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, TryjobStatus.SKIP, temp_json_file, chroot_path,
- custom_script)
+ update_tryjob_status.UpdateTryjobStatus(revision_to_update,
+ TryjobStatus.SKIP,
+ temp_json_file, custom_script)
self.assertEqual(
str(err.exception), 'Unable to find tryjob for %d in %s' %
@@ -324,13 +253,11 @@
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(revision_to_update,
TryjobStatus.GOOD, temp_json_file,
- chroot_path, custom_script)
+ custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -365,13 +292,11 @@
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(revision_to_update,
TryjobStatus.BAD, temp_json_file,
- chroot_path, custom_script)
+ custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -407,13 +332,11 @@
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(
revision_to_update, update_tryjob_status.TryjobStatus.SKIP,
- temp_json_file, chroot_path, custom_script)
+ temp_json_file, custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -448,13 +371,11 @@
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- chroot_path = '/abs/path/to/chroot'
-
custom_script = None
update_tryjob_status.UpdateTryjobStatus(
revision_to_update, update_tryjob_status.TryjobStatus.PENDING,
- temp_json_file, chroot_path, custom_script)
+ temp_json_file, custom_script)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -465,69 +386,12 @@
mock_find_tryjob_index.assert_called_once()
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
@mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- # Simulate the behavior of `GetAutoResult()` when `cros buildresult` returns
- # a value that is in the mapping.
- @mock.patch.object(
- update_tryjob_status,
- 'GetAutoResult',
- return_value=TryjobStatus.GOOD.value)
- def testSuccessfullyUpdatedTryjobStatusToAuto(self, mock_get_auto_result,
- mock_find_tryjob_index):
- bisect_test_contents = {
- 'start': 369410,
- 'end': 369420,
- 'jobs': [{
- 'rev': 369411,
- 'status': 'pending',
- 'buildbucket_id': 1200
- }]
- }
-
- # Create a temporary .JSON file to simulate a .JSON file that has bisection
- # contents.
- with CreateTemporaryJsonFile() as temp_json_file:
- with open(temp_json_file, 'w') as f:
- WritePrettyJsonFile(bisect_test_contents, f)
-
- revision_to_update = 369411
-
- # Index of the tryjob that is going to have its 'status' value updated.
- tryjob_index = 0
-
- path_to_chroot = '/abs/path/to/chroot'
-
- custom_script = None
-
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, update_tryjob_status.TryjobStatus.AUTO,
- temp_json_file, path_to_chroot, custom_script)
-
- # Verify that the tryjob's 'status' has been updated in the status file.
- with open(temp_json_file) as status_file:
- bisect_contents = json.load(status_file)
-
- self.assertEqual(bisect_contents['jobs'][tryjob_index]['status'],
- update_tryjob_status.TryjobStatus.GOOD.value)
-
- mock_get_auto_result.assert_called_once_with(
- path_to_chroot,
- bisect_test_contents['jobs'][tryjob_index]['buildbucket_id'])
-
- mock_find_tryjob_index.assert_called_once()
-
- # Simulate the behavior of `FindTryjobIndex()` when the tryjob exists in the
- # status file.
- @mock.patch.object(update_tryjob_status, 'FindTryjobIndex', return_value=0)
- # Simulate the behavior of `GetCustomScriptResult()` when the custom script
- # exit code is in the mapping.
@mock.patch.object(
update_tryjob_status,
'GetCustomScriptResult',
return_value=TryjobStatus.SKIP.value)
- def testSuccessfullyUpdatedTryjobStatusToAuto(
+ def testUpdatedTryjobStatusToAutoPassedWithCustomScript(
self, mock_get_custom_script_result, mock_find_tryjob_index):
bisect_test_contents = {
'start': 369410,
@@ -550,13 +414,11 @@
# Index of the tryjob that is going to have its 'status' value updated.
tryjob_index = 0
- path_to_chroot = '/abs/path/to/chroot'
-
custom_script_path = '/abs/path/to/custom_script.py'
update_tryjob_status.UpdateTryjobStatus(
revision_to_update, update_tryjob_status.TryjobStatus.CUSTOM_SCRIPT,
- temp_json_file, path_to_chroot, custom_script_path)
+ temp_json_file, custom_script_path)
# Verify that the tryjob's 'status' has been updated in the status file.
with open(temp_json_file) as status_file:
@@ -593,8 +455,6 @@
revision_to_update = 369411
- path_to_chroot = '/abs/path/to/chroot'
-
nonexistent_update_status = 'revert_status'
custom_script = None
@@ -602,9 +462,9 @@
# Verify the exception is raised when the `set_status` command line
# argument does not exist in the mapping.
with self.assertRaises(ValueError) as err:
- update_tryjob_status.UpdateTryjobStatus(
- revision_to_update, nonexistent_update_status, temp_json_file,
- path_to_chroot, custom_script)
+ update_tryjob_status.UpdateTryjobStatus(revision_to_update,
+ nonexistent_update_status,
+ temp_json_file, custom_script)
self.assertEqual(
str(err.exception),
diff --git a/llvm_tools/upload_lexan_crashes_to_forcey.py b/llvm_tools/upload_lexan_crashes_to_forcey.py
index b93f51a..61bf6b7 100755
--- a/llvm_tools/upload_lexan_crashes_to_forcey.py
+++ b/llvm_tools/upload_lexan_crashes_to_forcey.py
@@ -6,8 +6,6 @@
"""Fetches and submits the latest test-cases from Lexan's crash bucket."""
-# pylint: disable=cros-logging-import
-
import argparse
import contextlib
import datetime
@@ -101,21 +99,29 @@
shutil.rmtree(loc)
+def download_and_unpack_test_case(gs_url: str, tempdir: str) -> None:
+ suffix = os.path.splitext(gs_url)[1]
+ target_name = 'test_case' + suffix
+ target = os.path.join(tempdir, target_name)
+ subprocess.run(['gsutil.py', 'cp', gs_url, target], check=True)
+ subprocess.run(['tar', 'xaf', target_name], check=True, cwd=tempdir)
+ os.unlink(target)
+
+
def submit_test_case(gs_url: str, cr_tool: str) -> None:
logging.info('Submitting %s', gs_url)
- suffix = os.path.splitext(gs_url)[1]
with temp_dir() as tempdir:
- target_name = 'test_case' + suffix
- target = os.path.join(tempdir, target_name)
- subprocess.run(['gsutil.py', 'cp', gs_url, target], check=True)
- subprocess.run(['tar', 'xaf', target_name], check=True, cwd=tempdir)
- os.unlink(target)
+ download_and_unpack_test_case(gs_url, tempdir)
# Sometimes (e.g., in
# gs://chrome-clang-crash-reports/v1/2020/03/27/
# chromium.clang-ToTiOS-12754-GTXToolKit-2bfcde.tgz)
# we'll get `.crash` files. Unclear why, but let's filter them out anyway.
- repro_files = [x for x in os.listdir(tempdir) if not x.endswith('.crash')]
+ repro_files = [
+ os.path.join(tempdir, x)
+ for x in os.listdir(tempdir)
+ if not x.endswith('.crash')
+ ]
assert len(repro_files) == 2, repro_files
if repro_files[0].endswith('.sh'):
sh_file, src_file = repro_files
@@ -124,6 +130,13 @@
src_file, sh_file = repro_files
assert sh_file.endswith('.sh'), repro_files
+ # Peephole: lexan got a crash upload with a way old clang. Ignore it.
+ with open(sh_file, encoding='utf-8') as f:
+ if 'Crash reproducer for clang version 9.0.0' in f.read():
+ logging.warning('Skipping upload for %s; seems to be from an old clang',
+ gs_url)
+ return
+
subprocess.run(
[
cr_tool,
diff --git a/llvm_tools/upload_lexan_crashes_to_forcey_test.py b/llvm_tools/upload_lexan_crashes_to_forcey_test.py
index 3c9c0d4..937cbf8 100755
--- a/llvm_tools/upload_lexan_crashes_to_forcey_test.py
+++ b/llvm_tools/upload_lexan_crashes_to_forcey_test.py
@@ -7,6 +7,7 @@
"""Tests for upload_lexan_crashes_to_forcey."""
import datetime
+import os
import unittest
import unittest.mock
@@ -117,6 +118,29 @@
),
])
+ @unittest.mock.patch(
+ 'upload_lexan_crashes_to_forcey.download_and_unpack_test_case')
+ @unittest.mock.patch('subprocess.run')
+ def test_test_case_submission_functions(self, subprocess_run_mock,
+ download_and_unpack_mock):
+ mock_gs_url = 'gs://foo/bar/baz'
+
+ def side_effect(gs_url: str, tempdir: str) -> None:
+ self.assertEqual(gs_url, mock_gs_url)
+
+ with open(os.path.join(tempdir, 'test_case.c'), 'w') as f:
+ # All we need is an empty file here.
+ pass
+
+ with open(
+ os.path.join(tempdir, 'test_case.sh'), 'w', encoding='utf-8') as f:
+ f.write('# Crash reproducer for clang version 9.0.0 (...)\n')
+ f.write('clang something or other\n')
+
+ download_and_unpack_mock.side_effect = side_effect
+ upload_lexan_crashes_to_forcey.submit_test_case(mock_gs_url, '4c')
+ subprocess_run_mock.assert_not_called()
+
if __name__ == '__main__':
unittest.main()
diff --git a/lock_machine.py b/lock_machine.py
index 776fe54..03c8c99 100755
--- a/lock_machine.py
+++ b/lock_machine.py
@@ -42,7 +42,7 @@
class MachineType(enum.Enum):
"""Enum class to hold machine type."""
LOCAL = 'local'
- SKYLAB = 'skylab'
+ CROSFLEET = 'crosfleet'
class LockManager(object):
@@ -53,15 +53,15 @@
machines, using appropriate locking mechanisms for each.
"""
- SKYLAB_PATH = 'skylab'
+ CROSFLEET_PATH = 'crosfleet'
# TODO(zhizhouy): lease time may needs to be dynamically adjusted. For now we
# set it long enough to cover the period to finish nightly rotation tests.
LEASE_MINS = 1439
- SKYLAB_CREDENTIAL = ('/usr/local/google/home/mobiletc-prebuild'
- '/sheriff_utils/credentials/skylab'
- '/chromeos-swarming-credential.json')
+ CROSFLEET_CREDENTIAL = ('/usr/local/google/home/mobiletc-prebuild'
+ '/sheriff_utils/credentials/skylab'
+ '/chromeos-swarming-credential.json')
SWARMING = 'chromite/third_party/swarming.client/swarming.py'
SUCCESS = 0
@@ -102,7 +102,7 @@
self.force = force_option
self.local_machines = []
- self.skylab_machines = []
+ self.crosfleet_machines = []
def CheckMachine(self, machine, error_msg):
"""Verifies that machine is responding to ping.
@@ -125,8 +125,8 @@
Returns:
A list of names of the toolchain machines in the ChromeOS HW lab.
"""
- machines_file = os.path.join(
- os.path.dirname(__file__), 'crosperf', 'default_remotes')
+ machines_file = os.path.join(os.path.dirname(__file__), 'crosperf',
+ 'default_remotes')
machine_list = []
with open(machines_file, 'r') as input_file:
lines = input_file.readlines()
@@ -148,8 +148,8 @@
"""
if m in self.local_machines:
return MachineType.LOCAL
- if m in self.skylab_machines:
- return MachineType.SKYLAB
+ if m in self.crosfleet_machines:
+ return MachineType.CROSFLEET
def PrintStatusHeader(self):
"""Prints the status header lines for machines."""
@@ -166,12 +166,12 @@
"""
if state['locked']:
print('%s (%s)\t\t%slocked by %s since %s' %
- (m, state['board'], '\t\t' if machine_type == MachineType.LOCAL else
- '', state['locked_by'], state['lock_time']))
+ (m, state['board'], '\t\t' if machine_type == MachineType.LOCAL
+ else '', state['locked_by'], state['lock_time']))
else:
- print(
- '%s (%s)\t\t%sunlocked' % (m, state['board'], '\t\t' if
- machine_type == MachineType.LOCAL else ''))
+ print('%s (%s)\t\t%sunlocked' %
+ (m, state['board'],
+ '\t\t' if machine_type == MachineType.LOCAL else ''))
def AddMachineToLocal(self, machine):
"""Adds a machine to local machine list.
@@ -182,14 +182,14 @@
if machine not in self.local_machines:
self.local_machines.append(machine)
- def AddMachineToSkylab(self, machine):
- """Adds a machine to skylab machine list.
+ def AddMachineToCrosfleet(self, machine):
+ """Adds a machine to crosfleet machine list.
Args:
machine: The machine to be added.
"""
- if machine not in self.skylab_machines:
- self.skylab_machines.append(machine)
+ if machine not in self.crosfleet_machines:
+ self.crosfleet_machines.append(machine)
def ListMachineStates(self, machine_states):
"""Gets and prints the current status for a list of machines.
@@ -208,8 +208,8 @@
state = machine_states[m]
self.PrintStatus(m, state, machine_type)
- def UpdateLockInSkylab(self, should_lock_machine, machine):
- """Ask skylab to lease/release a machine.
+ def UpdateLockInCrosfleet(self, should_lock_machine, machine):
+ """Ask crosfleet to lease/release a machine.
Args:
should_lock_machine: Boolean indicating whether to lock the machine (True)
@@ -221,9 +221,9 @@
"""
try:
if should_lock_machine:
- ret = self.LeaseSkylabMachine(machine)
+ ret = self.LeaseCrosfleetMachine(machine)
else:
- ret = self.ReleaseSkylabMachine(machine)
+ ret = self.ReleaseCrosfleetMachine(machine)
except Exception:
return False
return ret
@@ -268,18 +268,18 @@
# TODO(zhizhouy): Handling exceptions with more details when locking
# doesn't succeed.
machine_type = self.GetMachineType(m)
- if machine_type == MachineType.SKYLAB:
- ret = self.UpdateLockInSkylab(lock_machines, m)
+ if machine_type == MachineType.CROSFLEET:
+ ret = self.UpdateLockInCrosfleet(lock_machines, m)
elif machine_type == MachineType.LOCAL:
ret = self.UpdateFileLock(lock_machines, m)
if ret:
- self.logger.LogOutput(
- '%s %s machine succeeded: %s.' % (action, machine_type.value, m))
+ self.logger.LogOutput('%s %s machine succeeded: %s.' %
+ (action, machine_type.value, m))
updated_machines.append(m)
else:
- self.logger.LogOutput(
- '%s %s machine failed: %s.' % (action, machine_type.value, m))
+ self.logger.LogOutput('%s %s machine failed: %s.' %
+ (action, machine_type.value, m))
self.machines = updated_machines
return updated_machines
@@ -323,10 +323,10 @@
'(%s).' % k)
self._InternalRemoveMachine(k)
- # TODO(zhizhouy): Skylab doesn't support host info such as locked_by.
- # Need to update this when skylab supports it.
- if (state['locked'] and state['locked_by'] and
- state['locked_by'] != self.user):
+ # TODO(zhizhouy): Crosfleet doesn't support host info such as locked_by.
+ # Need to update this when crosfleet supports it.
+ if (state['locked'] and state['locked_by']
+ and state['locked_by'] != self.user):
raise DontOwnLock('Attempt to unlock machine (%s) locked by someone '
'else (%s).' % (k, state['locked_by']))
elif cmd == 'lock':
@@ -352,10 +352,11 @@
"""
machine_list = {}
for m in self.machines:
- # For local or skylab machines, we simply set {'locked': status} for them
- # TODO(zhizhouy): This is a quick fix since skylab cannot return host info
- # as afe does. We need to get more info such as locked_by when skylab
- # supports that.
+ # For local or crosfleet machines, we simply set {'locked': status} for
+ # them
+ # TODO(zhizhouy): This is a quick fix since crosfleet cannot return host
+ # info as afe does. We need to get more info such as locked_by when
+ # crosfleet supports that.
values = {
'locked': 0 if cmd == 'lock' else 1,
'board': '??',
@@ -368,31 +369,29 @@
return machine_list
- def CheckMachineInSkylab(self, machine):
- """Run command to check if machine is in Skylab or not.
+ def CheckMachineInCrosfleet(self, machine):
+ """Run command to check if machine is in Crosfleet or not.
Returns:
- True if machine in skylab, else False
+ True if machine in crosfleet, else False
"""
credential = ''
- if os.path.exists(self.SKYLAB_CREDENTIAL):
- credential = '--auth-service-account-json %s' % self.SKYLAB_CREDENTIAL
+ if os.path.exists(self.CROSFLEET_CREDENTIAL):
+ credential = '--auth-service-account-json %s' % self.CROSFLEET_CREDENTIAL
swarming = os.path.join(self.chromeos_root, self.SWARMING)
# TODO(zhizhouy): Swarming script doesn't support python3 so explicitly
# launch it with python2 until migrated.
- cmd = (('python2 %s ' \
- 'query --swarming https://chromeos-swarming.appspot.com ' \
- "%s 'bots/list?is_dead=FALSE&dimensions=dut_name:%s'") % \
- (swarming,
- credential,
- machine.rstrip('.cros')))
+ cmd = (('python2 %s '
+ 'query --swarming https://chromeos-swarming.appspot.com '
+ "%s 'bots/list?is_dead=FALSE&dimensions=dut_name:%s'") %
+ (swarming, credential, machine.rstrip('.cros')))
exit_code, stdout, stderr = self.ce.RunCommandWOutput(cmd)
if exit_code:
- raise ValueError(
- 'Querying bots failed (2); stdout: %r; stderr: %r' % (stdout, stderr))
+ raise ValueError('Querying bots failed (2); stdout: %r; stderr: %r' %
+ (stdout, stderr))
- # The command will return a json output as stdout. If machine not in skylab
- # stdout will look like this:
+ # The command will return a json output as stdout. If machine not in
+ # crosfleet, stdout will look like this:
# {
# "death_timeout": "600",
# "now": "TIMESTAMP"
@@ -401,39 +400,35 @@
# this keyword for result.
return 'items' in stdout
- def LeaseSkylabMachine(self, machine):
- """Run command to lease dut from skylab.
+ def LeaseCrosfleetMachine(self, machine):
+ """Run command to lease dut from crosfleet.
Returns:
True if succeeded, False if failed.
"""
credential = ''
- if os.path.exists(self.SKYLAB_CREDENTIAL):
- credential = '-service-account-json %s' % self.SKYLAB_CREDENTIAL
- cmd = (('%s lease-dut -minutes %s %s %s') % \
- (self.SKYLAB_PATH,
- self.LEASE_MINS,
- credential,
- machine.rstrip('.cros')))
- # Wait 120 seconds for server to start the lease task, if not started,
+ if os.path.exists(self.CROSFLEET_CREDENTIAL):
+ credential = '-service-account-json %s' % self.CROSFLEET_CREDENTIAL
+ cmd = (('%s dut lease -minutes %s %s %s %s') %
+ (self.CROSFLEET_PATH, self.LEASE_MINS, credential, '-host'
+ if '.cros' in machine else '-board', machine.rstrip('.cros')))
+ # Wait 8 minutes for server to start the lease task, if not started,
# we will treat it as unavailable.
- check_interval_time = 120
+ check_interval_time = 480
retval = self.ce.RunCommand(cmd, command_timeout=check_interval_time)
return retval == self.SUCCESS
- def ReleaseSkylabMachine(self, machine):
- """Run command to release dut from skylab.
+ def ReleaseCrosfleetMachine(self, machine):
+ """Run command to release dut from crosfleet.
Returns:
True if succeeded, False if failed.
"""
credential = ''
- if os.path.exists(self.SKYLAB_CREDENTIAL):
- credential = '-service-account-json %s' % self.SKYLAB_CREDENTIAL
- cmd = (('%s release-dut %s %s') % \
- (self.SKYLAB_PATH,
- credential,
- machine.rstrip('.cros')))
+ if os.path.exists(self.CROSFLEET_CREDENTIAL):
+ credential = '-service-account-json %s' % self.CROSFLEET_CREDENTIAL
+ cmd = (('%s dut abandon %s %s') %
+ (self.CROSFLEET_PATH, credential, machine.rstrip('.cros')))
retval = self.ce.RunCommand(cmd)
return retval == self.SUCCESS
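For reference, a minimal sketch of the command strings the two crosfleet helpers above assemble, assuming a hypothetical DUT name and no service-account credential file:

# Sketch only: mirrors the string formatting in LeaseCrosfleetMachine and
# ReleaseCrosfleetMachine; the DUT name is hypothetical and credential is empty.
CROSFLEET_PATH = 'crosfleet'
LEASE_MINS = 1439
credential = ''
machine = 'chromeos6-row4-rack9-host2.cros'
lease_cmd = ('%s dut lease -minutes %s %s %s %s' %
             (CROSFLEET_PATH, LEASE_MINS, credential,
              '-host' if '.cros' in machine else '-board',
              machine.rstrip('.cros')))
abandon_cmd = ('%s dut abandon %s %s' %
               (CROSFLEET_PATH, credential, machine.rstrip('.cros')))
# lease_cmd   == 'crosfleet dut lease -minutes 1439  -host chromeos6-row4-rack9-host2'
# abandon_cmd == 'crosfleet dut abandon  chromeos6-row4-rack9-host2'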
@@ -449,44 +444,39 @@
"""
parser = argparse.ArgumentParser()
- parser.add_argument(
- '--list',
- dest='cmd',
- action='store_const',
- const='status',
- help='List current status of all known machines.')
- parser.add_argument(
- '--lock',
- dest='cmd',
- action='store_const',
- const='lock',
- help='Lock given machine(s).')
- parser.add_argument(
- '--unlock',
- dest='cmd',
- action='store_const',
- const='unlock',
- help='Unlock given machine(s).')
- parser.add_argument(
- '--status',
- dest='cmd',
- action='store_const',
- const='status',
- help='List current status of given machine(s).')
- parser.add_argument(
- '--remote', dest='remote', help='machines on which to operate')
- parser.add_argument(
- '--chromeos_root',
- dest='chromeos_root',
- required=True,
- help='ChromeOS root to use for autotest scripts.')
- parser.add_argument(
- '--force',
- dest='force',
- action='store_true',
- default=False,
- help='Force lock/unlock of machines, even if not'
- ' current lock owner.')
+ parser.add_argument('--list',
+ dest='cmd',
+ action='store_const',
+ const='status',
+ help='List current status of all known machines.')
+ parser.add_argument('--lock',
+ dest='cmd',
+ action='store_const',
+ const='lock',
+ help='Lock given machine(s).')
+ parser.add_argument('--unlock',
+ dest='cmd',
+ action='store_const',
+ const='unlock',
+ help='Unlock given machine(s).')
+ parser.add_argument('--status',
+ dest='cmd',
+ action='store_const',
+ const='status',
+ help='List current status of given machine(s).')
+ parser.add_argument('--remote',
+ dest='remote',
+ help='machines on which to operate')
+ parser.add_argument('--chromeos_root',
+ dest='chromeos_root',
+ required=True,
+ help='ChromeOS root to use for autotest scripts.')
+ parser.add_argument('--force',
+ dest='force',
+ action='store_true',
+ default=False,
+ help='Force lock/unlock of machines, even if not'
+ ' current lock owner.')
options = parser.parse_args(argv)
@@ -504,7 +494,8 @@
if options.remote:
machine_list = options.remote.split()
- lock_manager = LockManager(machine_list, options.force, options.chromeos_root)
+ lock_manager = LockManager(machine_list, options.force,
+ options.chromeos_root)
machine_states = lock_manager.GetMachineStates(cmd=options.cmd)
cmd = options.cmd
diff --git a/make_root_writable.py b/make_root_writable.py
new file mode 100755
index 0000000..0163adf
--- /dev/null
+++ b/make_root_writable.py
@@ -0,0 +1,227 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to make / directory on chromebook writable.
+
+This script updates a remote chromebook to make the / directory writable."
+"""
+
+from __future__ import print_function
+
+__author__ = '[email protected] (Caroline Tice)'
+
+import argparse
+import os
+import sys
+import time
+
+from cros_utils import command_executer
+from cros_utils import locks
+from cros_utils import logger
+from cros_utils import machines
+from cros_utils import misc
+
+lock_file = '/tmp/image_chromeos_lock/image_chromeos_lock'
+
+
+def Usage(parser, message):
+ print('ERROR: %s' % message)
+ parser.print_help()
+ sys.exit(0)
+
+
+def RebootChromebook(chromeos_root, remote, cmd_executer):
+ cmd = 'sudo reboot'
+ cmd_executer.CrosRunCommand(cmd, chromeos_root=chromeos_root, machine=remote)
+ time.sleep(10)
+ success = False
+ for _ in range(1, 10):
+ if machines.MachineIsPingable(remote):
+ success = True
+ break
+ time.sleep(1)
+ return success
+
+
+def ParseOutput(output):
+ # See comment in FindPartitionNum.
+ lines = output.split('\n')
+ num_str = '-1'
+ for line in lines:
+ l = line.strip()
+ words = l.split()
+ if (len(words) > 2 and words[0] == 'sudo' and
+ words[1] == '/usr/share/vboot/bin/make_dev_ssd.sh' and
+ words[-2] == '--partitions'):
+ num_str = words[-1]
+ break
+ num = int(num_str)
+
+ return num
+
+
+def FindPartitionNum(chromeos_root, remote, logs, cmd_executer):
+ partition_cmd = ('/usr/share/vboot/bin/make_dev_ssd.sh '
+ '--remove_rootfs_verification')
+ _, output, _ = cmd_executer.CrosRunCommandWOutput(
+ partition_cmd,
+ chromeos_root=chromeos_root,
+ machine=remote,
+ terminated_timeout=10)
+
+ # The command above, with no --partitions flag, should return output
+ # in the following form:
+
+ # make_dev_ssd.sh: INFO: checking system firmware...
+ #
+ # ERROR: YOU ARE TRYING TO MODIFY THE LIVE SYSTEM IMAGE /dev/mmcblk0.
+ #
+ # The system may become unusable after that change, especially when you have
+ # some auto updates in progress. To make it safer, we suggest you to only
+ # change the partition you have booted with. To do that, re-execute this
+ # command as:
+ #
+ # sudo /usr/share/vboot/bin/make_dev_ssd.sh --partitions 4
+ #
+ # If you are sure to modify other partition, please invoke the command again
+ # and explicitly assign only one target partition for each time
+ # (--partitions N )
+ #
+ # make_dev_ssd.sh: ERROR: IMAGE /dev/mmcblk0 IS NOT MODIFIED.
+
+ # We pass this output to the ParseOutput function where it finds the 'sudo'
+ # line with the partition number and returns the partition number.
+
+ num = ParseOutput(output)
+
+ if num == -1:
+ logs.LogOutput('Failed to find partition number in "%s"' % output)
+ return num
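A minimal sketch of what ParseOutput() extracts from that output (sample text abridged from the comment above; -1 means no suggestion line was found):

# Sketch: ParseOutput() pulls the partition number off the suggested re-run line.
sample_output = (
    'make_dev_ssd.sh: INFO: checking system firmware...\n'
    'sudo /usr/share/vboot/bin/make_dev_ssd.sh --partitions 4\n'
    'make_dev_ssd.sh: ERROR: IMAGE /dev/mmcblk0 IS NOT MODIFIED.\n')
assert ParseOutput(sample_output) == 4
assert ParseOutput('no suggestion line here') == -1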
+
+
+def TryRemoveRootfsFromPartition(chromeos_root, remote, cmd_executer,
+ partition_num):
+ partition_cmd = ('/usr/share/vboot/bin/make_dev_ssd.sh '
+ '--remove_rootfs_verification --partitions %d' %
+ partition_num)
+ ret = cmd_executer.CrosRunCommand(
+ partition_cmd,
+ chromeos_root=chromeos_root,
+ machine=remote,
+ terminated_timeout=10)
+ return ret
+
+
+def TryRemountPartitionAsRW(chromeos_root, remote, cmd_executer):
+ command = 'sudo mount -o remount,rw /'
+ ret = cmd_executer.CrosRunCommand(
+ command,
+ chromeos_root=chromeos_root,
+ machine=remote,
+ terminated_timeout=10)
+ return ret
+
+
+def Main(argv):
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ '-c',
+ '--chromeos_root',
+ dest='chromeos_root',
+ help='Target directory for ChromeOS installation.')
+ parser.add_argument('-r', '--remote', dest='remote', help='Target device.')
+ parser.add_argument(
+ '-n',
+ '--no_lock',
+ dest='no_lock',
+ default=False,
+ action='store_true',
+ help='Do not attempt to lock remote before imaging. '
+ 'This option should only be used in cases where the '
+ 'exclusive lock has already been acquired (e.g. in '
+ 'a script that calls this one).')
+
+ options = parser.parse_args(argv[1:])
+
+ # Common initializations
+ log_level = 'average'
+ cmd_executer = command_executer.GetCommandExecuter(log_level=log_level)
+ l = logger.GetLogger()
+
+ if options.chromeos_root is None:
+ Usage(parser, '--chromeos_root must be set')
+
+ if options.remote is None:
+ Usage(parser, '--remote must be set')
+
+ options.chromeos_root = os.path.expanduser(options.chromeos_root)
+
+ try:
+ should_unlock = False
+ if not options.no_lock:
+ try:
+ _ = locks.AcquireLock(
+ list(options.remote.split()), options.chromeos_root)
+ should_unlock = True
+ except Exception as e:
+ raise RuntimeError('Error acquiring machine: %s' % str(e))
+
+ # Workaround for crosbug.com/35684.
+ os.chmod(misc.GetChromeOSKeyFile(options.chromeos_root), 0o600)
+
+ if log_level == 'average':
+ cmd_executer.SetLogLevel('verbose')
+
+ if not machines.MachineIsPingable(options.remote):
+ raise RuntimeError('Machine %s does not appear to be up.' %
+ options.remote)
+
+ ret = TryRemountPartitionAsRW(options.chromeos_root, options.remote,
+ cmd_executer)
+
+ if ret != 0:
+ l.LogOutput('Initial mount command failed. Looking for root partition'
+ ' number.')
+ part_num = FindPartitionNum(options.chromeos_root, options.remote, l,
+ cmd_executer)
+ if part_num != -1:
+ l.LogOutput('Attempting to remove rootfs verification on partition %d' %
+ part_num)
+ ret = TryRemoveRootfsFromPartition(options.chromeos_root,
+ options.remote, cmd_executer,
+ part_num)
+ if ret == 0:
+ l.LogOutput('Succeeded in removing rootfs verification from'
+ ' partition %d. Rebooting...' % part_num)
+ if not RebootChromebook(options.chromeos_root, options.remote,
+ cmd_executer):
+ raise RuntimeError('Chromebook failed to reboot.')
+ l.LogOutput('Reboot succeeded. Attempting to remount partition.')
+ ret = TryRemountPartitionAsRW(options.chromeos_root, options.remote,
+ cmd_executer)
+ if ret == 0:
+ l.LogOutput('Re-mounted / as writable.')
+ else:
+ l.LogOutput('Re-mount failed. / is not writable.')
+ else:
+ l.LogOutput('Failed to remove rootfs verification from partition'
+ ' %d.' % part_num)
+ else:
+ l.LogOutput('Re-mounted / as writable.')
+
+ l.LogOutput('Exiting.')
+
+ finally:
+ if should_unlock:
+ locks.ReleaseLock(list(options.remote.split()), options.chromeos_root)
+
+ return ret
+
+
+if __name__ == '__main__':
+ retval = Main(sys.argv)
+ sys.exit(retval)
diff --git a/pgo_tools/monitor_pgo_profiles.py b/pgo_tools/monitor_pgo_profiles.py
index cb41eb8..5c17423 100755
--- a/pgo_tools/monitor_pgo_profiles.py
+++ b/pgo_tools/monitor_pgo_profiles.py
@@ -5,18 +5,13 @@
"""Emails the mage if PGO profile generation hasn't succeeded recently."""
-# pylint: disable=cros-logging-import
-
import argparse
import datetime
-import sys
-import subprocess
import logging
+import subprocess
+import sys
from typing import List, NamedTuple, Optional, Tuple
-from cros_utils import email_sender
-from cros_utils import tiny_render
-
PGO_BUILDBOT_LINK = ('https://ci.chromium.org/p/chromeos/builders/toolchain/'
'pgo-generate-llvm-next-orchestrator')
@@ -56,47 +51,35 @@
return max(infos)
-def compose_complaint_email(
+def compose_complaint(
out_of_date_profiles: List[Tuple[datetime.datetime, ProfdataInfo]]
-) -> Optional[Tuple[str, tiny_render.Piece]]:
+) -> Optional[str]:
if not out_of_date_profiles:
return None
if len(out_of_date_profiles) == 1:
- subject = '1 llvm profile is out of date'
- body = ['out-of-date profile:']
+ body_lines = ['1 profile is out of date:']
else:
- subject = f'{len(out_of_date_profiles)} llvm profiles are out of date'
- body = ['out-of-date profiles:']
+ body_lines = [f'{len(out_of_date_profiles)} profiles are out of date:']
- out_of_date_items = []
for arch, profdata_info in out_of_date_profiles:
- out_of_date_items.append(
- f'{arch} (most recent profile was from {profdata_info.date} at '
+ body_lines.append(
+ f'- {arch} (most recent profile was from {profdata_info.date} at '
f'{profdata_info.location!r})')
- body += [
- tiny_render.UnorderedList(out_of_date_items),
- tiny_render.line_break,
- tiny_render.line_break,
+ body_lines.append('\n')
+ body_lines.append(
'PTAL to see if the llvm-pgo-generate bots are functioning normally. '
- 'Their status can be found at ',
- tiny_render.Link(href=PGO_BUILDBOT_LINK, inner=PGO_BUILDBOT_LINK),
- '.',
- ]
- return subject, body
+ f'Their status can be found at {PGO_BUILDBOT_LINK}.')
+ return '\n'.join(body_lines)
def main() -> None:
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--dry_run',
- action='store_true',
- help="Don't actually send an email",
- )
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'--max_age_days',
# These builders run ~weekly. If we fail to generate two in a row,
@@ -121,29 +104,12 @@
if age >= max_age:
out_of_date_profiles.append((arch, most_recent))
- email = compose_complaint_email(out_of_date_profiles)
- if not email:
- logging.info('No email to send; quit')
- return
+ complaint = compose_complaint(out_of_date_profiles)
+ if complaint:
+ logging.error('%s', complaint)
+ sys.exit(1)
- subject, body = email
-
- identifier = 'llvm-pgo-monitor'
- subject = f'[{identifier}] {subject}'
-
- logging.info('Sending email with title %r', subject)
- if args.dry_run:
- logging.info('Dry run specified\nSubject: %s\nBody:\n%s', subject,
- tiny_render.render_text_pieces(body))
- else:
- email_sender.EmailSender().SendX20Email(
- subject=subject,
- identifier=identifier,
- well_known_recipients=['mage'],
- direct_recipients=['[email protected]'],
- text_body=tiny_render.render_text_pieces(body),
- html_body=tiny_render.render_html_pieces(body),
- )
+ logging.info('Nothing seems wrong')
if __name__ == '__main__':
diff --git a/pgo_tools/monitor_pgo_profiles_unittest.py b/pgo_tools/monitor_pgo_profiles_unittest.py
index b4e085e..eef3388 100755
--- a/pgo_tools/monitor_pgo_profiles_unittest.py
+++ b/pgo_tools/monitor_pgo_profiles_unittest.py
@@ -11,41 +11,36 @@
import unittest.mock
import monitor_pgo_profiles
-from cros_utils import tiny_render
class Test(unittest.TestCase):
"""Tests for monitor_pgo_profiles."""
+ def test_compose_complaint_with_zero_out_of_date(self):
+ self.assertIsNone(monitor_pgo_profiles.compose_complaint([]))
- def test_compose_complaint_email_with_zero_out_of_date(self):
- self.assertIsNone(monitor_pgo_profiles.compose_complaint_email([]))
-
- def test_compose_complaint_email_with_one_out_of_date(self):
+ def test_compose_complaint_with_one_out_of_date(self):
profdata_info = monitor_pgo_profiles.ProfdataInfo(
date=datetime.datetime(2020, 1, 2, 3, 4, 5),
location='gs://somewhere',
)
- result = monitor_pgo_profiles.compose_complaint_email([
+ result = monitor_pgo_profiles.compose_complaint([
('some_arch', profdata_info),
])
- self.assertEqual(result, ('1 llvm profile is out of date', [
- 'out-of-date profile:',
- tiny_render.UnorderedList([
- f'some_arch (most recent profile was from {profdata_info.date} at '
- f'{profdata_info.location!r})'
- ]),
- tiny_render.line_break,
- tiny_render.line_break,
- 'PTAL to see if the llvm-pgo-generate bots are functioning normally. '
- 'Their status can be found at ',
- tiny_render.Link(
- href=monitor_pgo_profiles.PGO_BUILDBOT_LINK,
- inner=monitor_pgo_profiles.PGO_BUILDBOT_LINK,
- ),
- '.',
- ]))
+ self.assertEqual(
+ result,
+ '\n'.join((
+ '1 profile is out of date:',
+ f'- some_arch (most recent profile was from {profdata_info.date} '
+ f'at {profdata_info.location!r})',
+ '',
+ '',
+ 'PTAL to see if the llvm-pgo-generate bots are functioning '
+ 'normally. Their status can be found at '
+ f'{monitor_pgo_profiles.PGO_BUILDBOT_LINK}.',
+ )),
+ )
- def test_compose_complaint_email_with_two_out_of_date(self):
+ def test_compose_complaint_with_two_out_of_date(self):
profdata_info_1 = monitor_pgo_profiles.ProfdataInfo(
date=datetime.datetime(2020, 1, 2, 3, 4, 5),
location='gs://somewhere',
@@ -54,28 +49,25 @@
date=datetime.datetime(2020, 3, 2, 1, 4, 5),
location='gs://somewhere-else',
)
- result = monitor_pgo_profiles.compose_complaint_email([
+ result = monitor_pgo_profiles.compose_complaint([
('some_arch', profdata_info_1),
('some_other_arch', profdata_info_2),
])
- self.assertEqual(result, ('2 llvm profiles are out of date', [
- 'out-of-date profiles:',
- tiny_render.UnorderedList([
- f'some_arch (most recent profile was from {profdata_info_1.date} '
+ self.assertEqual(
+ result,
+ '\n'.join((
+ '2 profiles are out of date:',
+ f'- some_arch (most recent profile was from {profdata_info_1.date} '
f'at {profdata_info_1.location!r})',
- f'some_other_arch (most recent profile was from '
- f'{profdata_info_2.date} at {profdata_info_2.location!r})'
- ]),
- tiny_render.line_break,
- tiny_render.line_break,
- 'PTAL to see if the llvm-pgo-generate bots are functioning normally. '
- 'Their status can be found at ',
- tiny_render.Link(
- href=monitor_pgo_profiles.PGO_BUILDBOT_LINK,
- inner=monitor_pgo_profiles.PGO_BUILDBOT_LINK,
- ),
- '.',
- ]))
+ f'- some_other_arch (most recent profile was from '
+ f'{profdata_info_2.date} at {profdata_info_2.location!r})',
+ '',
+ '',
+ 'PTAL to see if the llvm-pgo-generate bots are functioning '
+ 'normally. Their status can be found at '
+ f'{monitor_pgo_profiles.PGO_BUILDBOT_LINK}.',
+ )),
+ )
@unittest.mock.patch.object(subprocess, 'run')
def test_fetching_profdata_functions(self, subprocess_run_mock):
diff --git a/run_tests_for.py b/run_tests_for.py
index 19f8172..807a218 100755
--- a/run_tests_for.py
+++ b/run_tests_for.py
@@ -74,21 +74,19 @@
"""Returns all files that appear to be Python tests in a given directory."""
subdir = os.path.join(toolchain_utils, rel_subdir)
test_files = (
- os.path.join(subdir, file_name)
- for file_name in os.listdir(subdir)
+ os.path.join(subdir, file_name) for file_name in os.listdir(subdir)
if file_name.endswith('_test.py') or file_name.endswith('_unittest.py'))
return _filter_python_tests(test_files, toolchain_utils)
def _run_test(test_spec):
"""Runs a test."""
- p = subprocess.Popen(
- test_spec.command,
- cwd=test_spec.directory,
- stdin=open('/dev/null'),
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- encoding='utf-8')
+ p = subprocess.Popen(test_spec.command,
+ cwd=test_spec.directory,
+ stdin=open('/dev/null'),
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ encoding='utf-8')
stdout, _ = p.communicate()
exit_code = p.wait()
return exit_code, stdout
@@ -115,12 +113,23 @@
if not test_file.endswith('.py'):
return []
- test_suffixes = ['_test.py', '_unittest.py']
- if any(test_file.endswith(x) for x in test_suffixes):
+ test_prefixes = ('test_', 'unittest_')
+ test_suffixes = ('_test.py', '_unittest.py')
+
+ test_file_name = os.path.basename(test_file)
+ test_file_is_a_test = (
+ any(test_file_name.startswith(x) for x in test_prefixes)
+ or any(test_file_name.endswith(x) for x in test_suffixes))
+
+ if test_file_is_a_test:
test_files = [test_file]
else:
- base = test_file[:-3]
- candidates = (base + x for x in test_suffixes)
+ test_file_no_suffix = test_file[:-3]
+ candidates = [test_file_no_suffix + x for x in test_suffixes]
+
+ dir_name = os.path.dirname(test_file)
+ candidates += (os.path.join(dir_name, x + test_file_name)
+ for x in test_prefixes)
test_files = (x for x in candidates if os.path.exists(x))
return _filter_python_tests(test_files, toolchain_utils)
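With the added prefix handling, a non-test source file now yields both suffix- and prefix-style candidates; a short sketch with a hypothetical path:

# Sketch of candidate generation for a hypothetical non-test file.
import os
test_file = '/repo/llvm_tools/git.py'
test_prefixes = ('test_', 'unittest_')
test_suffixes = ('_test.py', '_unittest.py')
candidates = [test_file[:-3] + x for x in test_suffixes]
candidates += (os.path.join(os.path.dirname(test_file),
                            x + os.path.basename(test_file))
               for x in test_prefixes)
# candidates: ['/repo/llvm_tools/git_test.py', '/repo/llvm_tools/git_unittest.py',
#              '/repo/llvm_tools/test_git.py', '/repo/llvm_tools/unittest_git.py']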
@@ -128,7 +137,7 @@
def _run_test_scripts(all_tests, show_successful_output=False):
"""Runs a list of TestSpecs. Returns whether all of them succeeded."""
with contextlib.closing(multiprocessing.pool.ThreadPool()) as pool:
- results = [pool.apply_async(_run_test, (test,)) for test in all_tests]
+ results = [pool.apply_async(_run_test, (test, )) for test in all_tests]
failures = []
for i, (test, future) in enumerate(zip(all_tests, results)):
@@ -234,16 +243,15 @@
default_toolchain_utils = os.path.abspath(os.path.dirname(__file__))
parser = argparse.ArgumentParser(description=__doc__)
- parser.add_argument(
- '--show_all_output',
- action='store_true',
- help='show stdout of successful tests')
- parser.add_argument(
- '--toolchain_utils',
- default=default_toolchain_utils,
- help='directory of toolchain-utils. Often auto-detected')
- parser.add_argument(
- 'file', nargs='*', help='a file that we should run tests for')
+ parser.add_argument('--show_all_output',
+ action='store_true',
+ help='show stdout of successful tests')
+ parser.add_argument('--toolchain_utils',
+ default=default_toolchain_utils,
+ help='directory of toolchain-utils. Often auto-detected')
+ parser.add_argument('file',
+ nargs='*',
+ help='a file that we should run tests for')
args = parser.parse_args(argv)
modified_files = [os.path.abspath(f) for f in args.file]
diff --git a/rust_tools/rust_uprev.py b/rust_tools/rust_uprev.py
index 3c0ad01..011639d 100755
--- a/rust_tools/rust_uprev.py
+++ b/rust_tools/rust_uprev.py
@@ -33,8 +33,6 @@
See `--help` for all available options.
"""
-# pylint: disable=cros-logging-import
-
import argparse
import pathlib
import json
@@ -44,16 +42,30 @@
import shutil
import subprocess
import sys
-import tempfile
+from pathlib import Path
from typing import Any, Callable, Dict, List, NamedTuple, Optional, T, Tuple
from llvm_tools import chroot, git
-RUST_PATH = '/mnt/host/source/src/third_party/chromiumos-overlay/dev-lang/rust'
+
+EQUERY = 'equery'
+GSUTIL = 'gsutil.py'
+MIRROR_PATH = 'gs://chromeos-localmirror/distfiles'
+RUST_PATH = Path(
+ '/mnt/host/source/src/third_party/chromiumos-overlay/dev-lang/rust')
def get_command_output(command: List[str], *args, **kwargs) -> str:
- return subprocess.check_output(
- command, encoding='utf-8', *args, **kwargs).strip()
+ return subprocess.check_output(command, encoding='utf-8', *args,
+ **kwargs).strip()
+
+
+def get_command_output_unchecked(command: List[str], *args, **kwargs) -> str:
+ return subprocess.run(command,
+ check=False,
+ stdout=subprocess.PIPE,
+ encoding='utf-8',
+ *args,
+ **kwargs).stdout.strip()
class RustVersion(NamedTuple):
@@ -75,8 +87,8 @@
r'\.ebuild$')
m = input_re.match(ebuild_name)
assert m, f'failed to parse {ebuild_name!r}'
- return RustVersion(
- int(m.group('major')), int(m.group('minor')), int(m.group('patch')))
+ return RustVersion(int(m.group('major')), int(m.group('minor')),
+ int(m.group('patch')))
@staticmethod
def parse(x: str) -> 'RustVersion':
@@ -87,13 +99,54 @@
r'(?:.ebuild)?$')
m = input_re.match(x)
assert m, f'failed to parse {x!r}'
- return RustVersion(
- int(m.group('major')), int(m.group('minor')), int(m.group('patch')))
+ return RustVersion(int(m.group('major')), int(m.group('minor')),
+ int(m.group('patch')))
+
+
+def compute_rustc_src_name(version: RustVersion) -> str:
+ return f'rustc-{version}-src.tar.gz'
+
+
+def compute_rust_bootstrap_prebuilt_name(version: RustVersion) -> str:
+ return f'rust-bootstrap-{version}.tbz2'
+
+
+def find_ebuild_for_package(name: str) -> os.PathLike:
+ """Returns the path to the ebuild for the named package."""
+ return get_command_output([EQUERY, 'w', name])
+
+
+def find_ebuild_path(directory: Path,
+ name: str,
+ version: Optional[RustVersion] = None) -> Path:
+ """Finds an ebuild in a directory.
+
+ Returns the path to the ebuild file. Asserts if there is not
+ exactly one match. The match is constrained by name and optionally
+ by version, but can match any patch level. E.g. "rust" version
+ 1.3.4 can match rust-1.3.4.ebuild but also rust-1.3.4-r6.ebuild.
+ """
+ if version:
+ pattern = f'{name}-{version}*.ebuild'
+ else:
+ pattern = f'{name}-*.ebuild'
+ matches = list(Path(directory).glob(pattern))
+ assert len(matches) == 1, matches
+ return matches[0]
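A hypothetical call, per the docstring above:

# Hypothetical usage: resolves the single matching ebuild at any -rN patch level,
# e.g. rust-1.3.4.ebuild or rust-1.3.4-r6.ebuild; asserts unless exactly one matches.
ebuild = find_ebuild_path(RUST_PATH, 'rust', RustVersion(1, 3, 4))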
+
+
+def get_rust_bootstrap_version():
+ """Get the version of the current rust-bootstrap package."""
+ bootstrap_ebuild = find_ebuild_path(rust_bootstrap_path(), 'rust-bootstrap')
+ m = re.match(r'^rust-bootstrap-(\d+).(\d+).(\d+)', bootstrap_ebuild.name)
+ assert m, bootstrap_ebuild.name
+ return RustVersion(int(m.group(1)), int(m.group(2)), int(m.group(3)))
def parse_commandline_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'--state_file',
required=True,
@@ -158,6 +211,18 @@
'specified, the tool will remove the oldest version in the chroot',
)
+ subparser_names.append('remove-bootstrap')
+ remove_bootstrap_parser = subparsers.add_parser(
+ 'remove-bootstrap',
+ help='Remove an old rust-bootstrap version',
+ )
+ remove_bootstrap_parser.add_argument(
+ '--version',
+ type=RustVersion.parse,
+ required=True,
+ help='rust-bootstrap version to remove',
+ )
+
subparser_names.append('roll')
roll_parser = subparsers.add_parser(
'roll',
@@ -210,33 +275,18 @@
return args
-def parse_stage0_file(new_version: RustVersion) -> Tuple[str, str, str]:
- # Find stage0 date, rustc and cargo
- stage0_file = get_command_output([
- 'curl', '-f', 'https://raw.githubusercontent.com/rust-lang/rust/'
- f'{new_version}/src/stage0.txt'
- ])
- regexp = re.compile(r'date:\s*(?P<date>\d+-\d+-\d+)\s+'
- r'rustc:\s*(?P<rustc>\d+\.\d+\.\d+)\s+'
- r'cargo:\s*(?P<cargo>\d+\.\d+\.\d+)')
- m = regexp.search(stage0_file)
- assert m, 'failed to parse stage0.txt file'
- stage0_date, stage0_rustc, stage0_cargo = m.groups()
- logging.info('Found stage0 file has date: %s, rustc: %s, cargo: %s',
- stage0_date, stage0_rustc, stage0_cargo)
- return stage0_date, stage0_rustc, stage0_cargo
-
-
def prepare_uprev(rust_version: RustVersion, template: Optional[RustVersion]
- ) -> Optional[Tuple[RustVersion, str]]:
+ ) -> Optional[Tuple[RustVersion, str, RustVersion]]:
if template is None:
- ebuild_path = get_command_output(['equery', 'w', 'rust'])
+ ebuild_path = find_ebuild_for_package('rust')
ebuild_name = os.path.basename(ebuild_path)
template_version = RustVersion.parse_from_ebuild(ebuild_name)
else:
ebuild_path = find_ebuild_for_rust_version(template)
template_version = template
+ bootstrap_version = get_rust_bootstrap_version()
+
if rust_version <= template_version:
logging.info(
'Requested version %s is not newer than the template version %s.',
@@ -245,66 +295,69 @@
logging.info('Template Rust version is %s (ebuild: %r)', template_version,
ebuild_path)
- return template_version, ebuild_path
+ logging.info('rust-bootstrap version is %s', bootstrap_version)
+
+ return template_version, ebuild_path, bootstrap_version
-def copy_patches(template_version: RustVersion,
+def copy_patches(directory: Path, template_version: RustVersion,
new_version: RustVersion) -> None:
- patch_path = os.path.join(RUST_PATH, 'files')
+ patch_path = directory.joinpath('files')
+ prefix = '%s-%s-' % (directory.name, template_version)
+ new_prefix = '%s-%s-' % (directory.name, new_version)
for f in os.listdir(patch_path):
- if f'rust-{template_version}' not in f:
+ if not f.startswith(prefix):
continue
- logging.info('Rename patch %s to new version', f)
+ logging.info('Copy patch %s to new version', f)
new_name = f.replace(str(template_version), str(new_version))
shutil.copyfile(
os.path.join(patch_path, f),
os.path.join(patch_path, new_name),
)
- subprocess.check_call(['git', 'add', f'files/rust-{new_version}-*.patch'],
- cwd=RUST_PATH)
+ subprocess.check_call(['git', 'add', f'{new_prefix}*.patch'], cwd=patch_path)
def create_ebuild(template_ebuild: str, new_version: RustVersion) -> str:
shutil.copyfile(template_ebuild,
- os.path.join(RUST_PATH, f'rust-{new_version}.ebuild'))
+ RUST_PATH.joinpath(f'rust-{new_version}.ebuild'))
subprocess.check_call(['git', 'add', f'rust-{new_version}.ebuild'],
cwd=RUST_PATH)
return os.path.join(RUST_PATH, f'rust-{new_version}.ebuild')
-def update_ebuild(ebuild_file: str, stage0_info: Tuple[str, str, str]) -> None:
- stage0_date, stage0_rustc, stage0_cargo = stage0_info
- with open(ebuild_file, encoding='utf-8') as f:
- contents = f.read()
- # Update STAGE0_DATE in the ebuild
- stage0_date_re = re.compile(r'STAGE0_DATE="(\d+-\d+-\d+)"')
- if not stage0_date_re.search(contents):
- raise RuntimeError('STAGE0_DATE not found in rust ebuild')
- new_contents = stage0_date_re.sub(f'STAGE0_DATE="{stage0_date}"', contents)
-
- # Update STAGE0_VERSION in the ebuild
- stage0_rustc_re = re.compile(r'STAGE0_VERSION="[^"]*"')
- if not stage0_rustc_re.search(new_contents):
- raise RuntimeError('STAGE0_VERSION not found in rust ebuild')
- new_contents = stage0_rustc_re.sub(f'STAGE0_VERSION="{stage0_rustc}"',
- new_contents)
-
- # Update STAGE0_VERSION_CARGO in the ebuild
- stage0_cargo_re = re.compile(r'STAGE0_VERSION_CARGO="[^"]*"')
- if not stage0_cargo_re.search(new_contents):
- raise RuntimeError('STAGE0_VERSION_CARGO not found in rust ebuild')
- new_contents = stage0_cargo_re.sub(f'STAGE0_VERSION_CARGO="{stage0_cargo}"',
- new_contents)
- with open(ebuild_file, 'w', encoding='utf-8') as f:
- f.write(new_contents)
- logging.info(
- 'Rust ebuild file has STAGE0_DATE, STAGE0_VERSION, STAGE0_VERSION_CARGO '
- 'updated to %s, %s, %s respectively', stage0_date, stage0_rustc,
- stage0_cargo)
+def update_bootstrap_ebuild(new_bootstrap_version: RustVersion) -> None:
+ old_ebuild = find_ebuild_path(rust_bootstrap_path(), 'rust-bootstrap')
+ m = re.match(r'^rust-bootstrap-(\d+).(\d+).(\d+)', old_ebuild.name)
+ assert m, old_ebuild.name
+ old_version = RustVersion(m.group(1), m.group(2), m.group(3))
+ new_ebuild = old_ebuild.parent.joinpath(
+ f'rust-bootstrap-{new_bootstrap_version}.ebuild')
+ old_text = old_ebuild.read_text(encoding='utf-8')
+ new_text, changes = re.subn(r'(RUSTC_RAW_FULL_BOOTSTRAP_SEQUENCE=\([^)]*)',
+ f'\\1\t{old_version}\n',
+ old_text,
+ flags=re.MULTILINE)
+ assert changes == 1, 'Failed to update RUSTC_RAW_FULL_BOOTSTRAP_SEQUENCE'
+ new_ebuild.write_text(new_text, encoding='utf-8')
-def flip_mirror_in_ebuild(ebuild_file: str, add: bool) -> None:
+def update_ebuild(ebuild_file: str,
+ new_bootstrap_version: RustVersion) -> None:
+ contents = open(ebuild_file, encoding='utf-8').read()
+ contents, subs = re.subn(r'^BOOTSTRAP_VERSION=.*$',
+ 'BOOTSTRAP_VERSION="%s"' %
+ (new_bootstrap_version, ),
+ contents,
+ flags=re.MULTILINE)
+ if not subs:
+ raise RuntimeError('BOOTSTRAP_VERSION not found in rust ebuild')
+ open(ebuild_file, 'w', encoding='utf-8').write(contents)
+ logging.info('Rust ebuild file has BOOTSTRAP_VERSION updated to %s',
+ new_bootstrap_version)
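The substitution above only touches the BOOTSTRAP_VERSION line; a small sketch with illustrative ebuild contents and versions:

import re
# Illustrative fragment; only the BOOTSTRAP_VERSION line changes.
contents = 'EAPI=7\nBOOTSTRAP_VERSION="1.51.0"\nSLOT="stable/${ABI_VER}"\n'
contents, subs = re.subn(r'^BOOTSTRAP_VERSION=.*$',
                         'BOOTSTRAP_VERSION="%s"' % ('1.52.0', ),
                         contents,
                         flags=re.MULTILINE)
assert subs == 1 and 'BOOTSTRAP_VERSION="1.52.0"' in contents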
+
+
+def flip_mirror_in_ebuild(ebuild_file: Path, add: bool) -> None:
restrict_re = re.compile(
r'(?P<before>RESTRICT=")(?P<values>"[^"]*"|.*)(?P<after>")')
with open(ebuild_file, encoding='utf-8') as f:
@@ -325,25 +378,116 @@
f.write(new_contents)
-def rust_ebuild_actions(actions: List[str], sudo: bool = False) -> None:
- ebuild_path_inchroot = get_command_output(['equery', 'w', 'rust'])
+def ebuild_actions(package: str, actions: List[str],
+ sudo: bool = False) -> None:
+ ebuild_path_inchroot = find_ebuild_for_package(package)
cmd = ['ebuild', ebuild_path_inchroot] + actions
if sudo:
cmd = ['sudo'] + cmd
subprocess.check_call(cmd)
-def update_manifest(ebuild_file: str) -> None:
- logging.info('Added "mirror" to RESTRICT to Rust ebuild')
- flip_mirror_in_ebuild(ebuild_file, add=True)
- rust_ebuild_actions(['manifest'])
- logging.info('Removed "mirror" to RESTRICT from Rust ebuild')
- flip_mirror_in_ebuild(ebuild_file, add=False)
+def fetch_distfile_from_mirror(name: str) -> None:
+ """Gets the named file from the local mirror.
+
+ This ensures that the file exists on the mirror, and
+ that we can read it. We overwrite any existing distfile
+ to ensure the checksums that update_manifest() records
+ match the file as it exists on the mirror.
+
+ This function also attempts to verify the ACL for
+ the file (which is expected to have READER permission
+ for allUsers). We can only see the ACL if the user
+ gsutil runs with is the owner of the file. If not,
+ we get an access denied error. We also count this
+ as a success, because it means we were able to fetch
+ the file even though we don't own it.
+ """
+ mirror_file = MIRROR_PATH + '/' + name
+ local_file = Path(get_distdir(), name)
+ cmd = [GSUTIL, 'cp', mirror_file, local_file]
+ logging.info('Running %r', cmd)
+ rc = subprocess.call(cmd)
+ if rc != 0:
+ logging.error(
+ """Could not fetch %s
+
+If the file does not yet exist at %s
+please download the file, verify its integrity
+with something like:
+
+curl -O https://static.rust-lang.org/dist/%s
+gpg --verify %s.asc
+
+You may need to import the signing key first, e.g.:
+
+gpg --recv-keys 85AB96E6FA1BE5FE
+
+Once you have verified the integrity of the file, upload
+it to the local mirror using gsutil cp.
+""", mirror_file, MIRROR_PATH, name, name)
+ raise Exception(f'Could not fetch {mirror_file}')
+ # Check that the ACL allows allUsers READER access.
+ # If we get an AccessDeniedException here, that also
+ # counts as a success, because we were able to fetch
+ # the file as a non-owner.
+ cmd = [GSUTIL, 'acl', 'get', mirror_file]
+ logging.info('Running %r', cmd)
+ output = get_command_output_unchecked(cmd, stderr=subprocess.STDOUT)
+ acl_verified = False
+ if 'AccessDeniedException:' in output:
+ acl_verified = True
+ else:
+ acl = json.loads(output)
+ for x in acl:
+ if x['entity'] == 'allUsers' and x['role'] == 'READER':
+ acl_verified = True
+ break
+ if not acl_verified:
+ logging.error('Output from acl get:\n%s', output)
+ raise Exception('Could not verify that allUsers has READER permission')
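The loop above accepts any entry granting allUsers the READER role in the JSON that `gsutil acl get` prints; a sketch of that shape (entries abridged):

# Sketch of the acl shape the check above accepts (other fields omitted).
acl = [
    {'entity': 'project-owners-123456', 'role': 'OWNER'},
    {'entity': 'allUsers', 'role': 'READER'},
]
assert any(x['entity'] == 'allUsers' and x['role'] == 'READER' for x in acl)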
+
+
+def fetch_bootstrap_distfiles(old_version: RustVersion,
+ new_version: RustVersion) -> None:
+ """Fetches rust-bootstrap distfiles from the local mirror
+
+ Fetches the distfiles for a rust-bootstrap ebuild to ensure they
+ are available on the mirror and the local copies are the same as
+ the ones on the mirror.
+ """
+ fetch_distfile_from_mirror(compute_rust_bootstrap_prebuilt_name(old_version))
+ fetch_distfile_from_mirror(compute_rustc_src_name(new_version))
+
+
+def fetch_rust_distfiles(version: RustVersion) -> None:
+ """Fetches rust distfiles from the local mirror
+
+ Fetches the distfiles for a rust ebuild to ensure they
+ are available on the mirror and the local copies are
+ the same as the ones on the mirror.
+ """
+ fetch_distfile_from_mirror(compute_rustc_src_name(version))
+
+
+def get_distdir() -> os.PathLike:
+ """Returns portage's distdir."""
+ return get_command_output(['portageq', 'distdir'])
+
+
+def update_manifest(ebuild_file: os.PathLike) -> None:
+ """Updates the MANIFEST for the ebuild at the given path."""
+ ebuild = Path(ebuild_file)
+ logging.info('Added "mirror" to RESTRICT in %s', ebuild.name)
+ flip_mirror_in_ebuild(ebuild, add=True)
+ ebuild_actions(ebuild.parent.name, ['manifest'])
+ logging.info('Removed "mirror" from RESTRICT in %s', ebuild.name)
+ flip_mirror_in_ebuild(ebuild, add=False)
def update_rust_packages(rust_version: RustVersion, add: bool) -> None:
- package_file = os.path.join(
- RUST_PATH, '../../profiles/targets/chromeos/package.provided')
+ package_file = RUST_PATH.joinpath(
+ '../../profiles/targets/chromeos/package.provided')
with open(package_file, encoding='utf-8') as f:
contents = f.read()
if add:
@@ -367,90 +511,13 @@
def update_virtual_rust(template_version: RustVersion,
new_version: RustVersion) -> None:
- virtual_rust_dir = os.path.join(RUST_PATH, '../../virtual/rust')
- assert os.path.exists(virtual_rust_dir)
- shutil.copyfile(
- os.path.join(virtual_rust_dir, f'rust-{template_version}.ebuild'),
- os.path.join(virtual_rust_dir, f'rust-{new_version}.ebuild'))
- subprocess.check_call(['git', 'add', f'rust-{new_version}.ebuild'],
- cwd=virtual_rust_dir)
-
-
-def upload_single_tarball(rust_url: str, tarfile_name: str,
- tempdir: str) -> None:
- rust_src = f'{rust_url}/{tarfile_name}'
- gsutil_location = f'gs://chromeos-localmirror/distfiles/{tarfile_name}'
-
- missing_file = subprocess.call(
- ['gsutil', 'ls', gsutil_location],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- )
- if not missing_file:
- logging.info('Rust artifact at %s already exists; skipping download',
- gsutil_location)
- return
-
- logging.info('Downloading Rust artifact from %s', rust_src)
-
- # Download Rust's source
- rust_file = os.path.join(tempdir, tarfile_name)
- subprocess.check_call(['curl', '-f', '-o', rust_file, rust_src])
-
- # Verify the signature of the source
- sig_file = os.path.join(tempdir, 'rustc_sig.asc')
- subprocess.check_call(['curl', '-f', '-o', sig_file, f'{rust_src}.asc'])
- try:
- subprocess.check_output(['gpg', '--verify', sig_file, rust_file],
- encoding='utf-8',
- stderr=subprocess.STDOUT)
- except subprocess.CalledProcessError as e:
- if "gpg: Can't check signature" not in e.output:
- raise RuntimeError(f'Failed to execute `gpg --verify`, {e.output}')
-
- # If it fails to verify the signature, try import rustc key, and retry.
- keys = get_command_output(
- ['curl', '-f', 'https://keybase.io/rust/pgp_keys.asc'])
- subprocess.run(['gpg', '--import'],
- input=keys,
- encoding='utf-8',
- check=True)
- subprocess.check_call(['gpg', '--verify', sig_file, rust_file])
-
- # Since we are using `-n` to skip an item if it already exists, there's no
- # need to check if the file exists on GS bucket or not.
- subprocess.check_call(
- ['gsutil', 'cp', '-n', '-a', 'public-read', rust_file, gsutil_location])
-
-
-def upload_to_localmirror(tempdir: str, rust_version: RustVersion,
- stage0_info: Tuple[str, str, str]) -> None:
- stage0_date, stage0_rustc, stage0_cargo = stage0_info
- rust_url = 'https://static.rust-lang.org/dist'
- # Upload rustc source
- upload_single_tarball(
- rust_url,
- f'rustc-{rust_version}-src.tar.gz',
- tempdir,
- )
- # Upload stage0 toolchain
- upload_single_tarball(
- f'{rust_url}/{stage0_date}',
- f'rust-std-{stage0_rustc}-x86_64-unknown-linux-gnu.tar.gz',
- tempdir,
- )
- # Upload stage0 source
- upload_single_tarball(
- rust_url,
- f'rustc-{stage0_rustc}-x86_64-unknown-linux-gnu.tar.gz',
- tempdir,
- )
- # Upload stage0 cargo
- upload_single_tarball(
- rust_url,
- f'cargo-{stage0_cargo}-x86_64-unknown-linux-gnu.tar.gz',
- tempdir,
- )
+ template_ebuild = find_ebuild_path(RUST_PATH.joinpath('../../virtual/rust'),
+ 'rust', template_version)
+ virtual_rust_dir = template_ebuild.parent
+ new_name = f'rust-{new_version}.ebuild'
+ new_ebuild = virtual_rust_dir.joinpath(new_name)
+ shutil.copyfile(template_ebuild, new_ebuild)
+ subprocess.check_call(['git', 'add', new_name], cwd=virtual_rust_dir)
def perform_step(state_file: pathlib.Path,
@@ -480,19 +547,18 @@
return val
-def prepare_uprev_from_json(obj: Any) -> Optional[Tuple[RustVersion, str]]:
+def prepare_uprev_from_json(
+ obj: Any) -> Optional[Tuple[RustVersion, str, RustVersion]]:
if not obj:
return None
- version, ebuild_path = obj
- return RustVersion(*version), ebuild_path
+ version, ebuild_path, bootstrap_version = obj
+ return RustVersion(*version), ebuild_path, RustVersion(*bootstrap_version)
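The serialized step result now carries the bootstrap version as a third element; an illustrative round-trip, mirroring the updated test in rust_uprev_test.py below:

obj = ([1, 3, 5], '/path/to/the/ebuild', [1, 1, 0])
assert prepare_uprev_from_json(obj) == (
    RustVersion(1, 3, 5), '/path/to/the/ebuild', RustVersion(1, 1, 0))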
def create_rust_uprev(rust_version: RustVersion,
maybe_template_version: Optional[RustVersion],
skip_compile: bool, run_step: Callable[[], T]) -> None:
- stage0_info = run_step(
- 'parse stage0 file', lambda: parse_stage0_file(rust_version))
- template_version, template_ebuild = run_step(
+ template_version, template_ebuild, old_bootstrap_version = run_step(
'prepare uprev',
lambda: prepare_uprev(rust_version, maybe_template_version),
result_from_json=prepare_uprev_from_json,
@@ -500,18 +566,31 @@
if template_ebuild is None:
return
- run_step('copy patches', lambda: copy_patches(template_version, rust_version))
+ # The fetch steps will fail (on purpose) if the files they check for
+ # are not available on the mirror. To make them pass, fetch the
+ # required files yourself, verify their checksums, then upload them
+ # to the mirror.
+ run_step(
+ 'fetch bootstrap distfiles', lambda: fetch_bootstrap_distfiles(
+ old_bootstrap_version, template_version))
+ run_step('fetch rust distfiles', lambda: fetch_rust_distfiles(rust_version))
+ run_step('update bootstrap ebuild', lambda: update_bootstrap_ebuild(
+ template_version))
+ run_step(
+ 'update bootstrap manifest', lambda: update_manifest(rust_bootstrap_path(
+ ).joinpath(f'rust-bootstrap-{template_version}.ebuild')))
+ run_step('copy patches', lambda: copy_patches(RUST_PATH, template_version,
+ rust_version))
ebuild_file = run_step(
'create ebuild', lambda: create_ebuild(template_ebuild, rust_version))
- run_step('update ebuild', lambda: update_ebuild(ebuild_file, stage0_info))
- with tempfile.TemporaryDirectory(dir='/tmp') as tempdir:
- run_step('upload_to_localmirror', lambda: upload_to_localmirror(
- tempdir, rust_version, stage0_info))
+ run_step(
+ 'update ebuild', lambda: update_ebuild(ebuild_file, template_version))
run_step('update manifest to add new version', lambda: update_manifest(
- ebuild_file))
+ Path(ebuild_file)))
if not skip_compile:
- run_step('emerge rust', lambda: subprocess.check_call(
- ['sudo', 'emerge', 'dev-lang/rust']))
+ run_step(
+ 'emerge rust', lambda: subprocess.check_call(
+ ['sudo', 'emerge', 'dev-lang/rust']))
run_step('insert version into rust packages', lambda: update_rust_packages(
rust_version, add=True))
run_step('upgrade virtual/rust', lambda: update_virtual_rust(
@@ -520,8 +599,7 @@
def find_rust_versions_in_chroot() -> List[Tuple[RustVersion, str]]:
return [(RustVersion.parse_from_ebuild(x), os.path.join(RUST_PATH, x))
- for x in os.listdir(RUST_PATH)
- if x.endswith('.ebuild')]
+ for x in os.listdir(RUST_PATH) if x.endswith('.ebuild')]
def find_oldest_rust_version_in_chroot() -> Tuple[RustVersion, str]:
@@ -547,32 +625,52 @@
subprocess.check_call(['git', 'rm', filename], cwd=path)
+def remove_rust_bootstrap_version(version: RustVersion,
+ run_step: Callable[[], T]) -> None:
+ prefix = f'rust-bootstrap-{version}'
+ run_step('remove old bootstrap ebuild', lambda: remove_files(
+ f'{prefix}*.ebuild', rust_bootstrap_path()))
+ ebuild_file = find_ebuild_for_package('rust-bootstrap')
+ run_step('update bootstrap manifest to delete old version', lambda:
+ update_manifest(ebuild_file))
+
+
def remove_rust_uprev(rust_version: Optional[RustVersion],
run_step: Callable[[], T]) -> None:
-
def find_desired_rust_version():
if rust_version:
return rust_version, find_ebuild_for_rust_version(rust_version)
return find_oldest_rust_version_in_chroot()
+ def find_desired_rust_version_from_json(obj: Any) -> Tuple[RustVersion, str]:
+ version, ebuild_path = obj
+ return RustVersion(*version), ebuild_path
+
delete_version, delete_ebuild = run_step(
'find rust version to delete',
find_desired_rust_version,
- result_from_json=prepare_uprev_from_json,
+ result_from_json=find_desired_rust_version_from_json,
)
run_step(
'remove patches', lambda: remove_files(
f'files/rust-{delete_version}-*.patch', RUST_PATH))
run_step('remove ebuild', lambda: remove_files(delete_ebuild, RUST_PATH))
- ebuild_file = get_command_output(['equery', 'w', 'rust'])
+ ebuild_file = find_ebuild_for_package('rust')
run_step('update manifest to delete old version', lambda: update_manifest(
ebuild_file))
run_step('remove version from rust packages', lambda: update_rust_packages(
delete_version, add=False))
- run_step(
- 'remove virtual/rust', lambda: remove_files(
- f'rust-{delete_version}.ebuild',
- os.path.join(RUST_PATH, '../../virtual/rust')))
+ run_step('remove virtual/rust', lambda: remove_virtual_rust(delete_version))
+
+
+def remove_virtual_rust(delete_version: RustVersion) -> None:
+ ebuild = find_ebuild_path(RUST_PATH.joinpath('../../virtual/rust'), 'rust',
+ delete_version)
+ subprocess.check_call(['git', 'rm', str(ebuild.name)], cwd=ebuild.parent)
+
+
+def rust_bootstrap_path() -> Path:
+ return RUST_PATH.joinpath('../rust-bootstrap')
def create_new_repo(rust_version: RustVersion) -> None:
@@ -586,7 +684,7 @@
def build_cross_compiler() -> None:
# Get target triples in ebuild
- rust_ebuild = get_command_output(['equery', 'w', 'rust'])
+ rust_ebuild = find_ebuild_for_package('rust')
with open(rust_ebuild, encoding='utf-8') as f:
contents = f.read()
@@ -657,6 +755,8 @@
run_step)
elif args.subparser_name == 'remove':
remove_rust_uprev(args.rust_version, run_step)
+ elif args.subparser_name == 'remove-bootstrap':
+ remove_rust_bootstrap_version(args.version, run_step)
else:
# If you have added more subparser_name, please also add the handlers above
assert args.subparser_name == 'roll'
@@ -665,6 +765,9 @@
run_step('build cross compiler', build_cross_compiler)
create_rust_uprev(args.uprev, args.template, args.skip_compile, run_step)
remove_rust_uprev(args.remove, run_step)
+ bootstrap_version = prepare_uprev_from_json(
+ completed_steps['prepare uprev'])[2]
+ remove_rust_bootstrap_version(bootstrap_version, run_step)
if not args.no_upload:
run_step('create rust uprev CL', lambda: create_new_commit(args.uprev))
diff --git a/rust_tools/rust_uprev_test.py b/rust_tools/rust_uprev_test.py
index fc50600..0076139 100755
--- a/rust_tools/rust_uprev_test.py
+++ b/rust_tools/rust_uprev_test.py
@@ -6,16 +6,94 @@
"""Tests for rust_uprev.py"""
-# pylint: disable=cros-logging-import
import os
import shutil
import subprocess
+import tempfile
import unittest
+from pathlib import Path
from unittest import mock
from llvm_tools import git
import rust_uprev
+from rust_uprev import RustVersion
+
+
+def _fail_command(cmd, *_args, **_kwargs):
+ err = subprocess.CalledProcessError(returncode=1, cmd=cmd)
+ err.stderr = b'mock failure'
+ raise err
+
+
+class FetchDistfileTest(unittest.TestCase):
+ """Tests rust_uprev.fetch_distfile_from_mirror()"""
+
+ @mock.patch.object(rust_uprev, 'get_distdir', return_value='/fake/distfiles')
+ @mock.patch.object(subprocess, 'call', side_effect=_fail_command)
+ def test_fetch_distfile_fail(self, *_args) -> None:
+ with self.assertRaises(subprocess.CalledProcessError):
+ rust_uprev.fetch_distfile_from_mirror('test_distfile.tar.gz')
+
+ @mock.patch.object(rust_uprev,
+ 'get_command_output_unchecked',
+ return_value='AccessDeniedException: Access denied.')
+ @mock.patch.object(rust_uprev, 'get_distdir', return_value='/fake/distfiles')
+ @mock.patch.object(subprocess, 'call', return_value=0)
+ def test_fetch_distfile_acl_access_denied(self, *_args) -> None:
+ rust_uprev.fetch_distfile_from_mirror('test_distfile.tar.gz')
+
+ @mock.patch.object(
+ rust_uprev,
+ 'get_command_output_unchecked',
+ return_value='[ { "entity": "allUsers", "role": "READER" } ]')
+ @mock.patch.object(rust_uprev, 'get_distdir', return_value='/fake/distfiles')
+ @mock.patch.object(subprocess, 'call', return_value=0)
+ def test_fetch_distfile_acl_ok(self, *_args) -> None:
+ rust_uprev.fetch_distfile_from_mirror('test_distfile.tar.gz')
+
+ @mock.patch.object(
+ rust_uprev,
+ 'get_command_output_unchecked',
+ return_value='[ { "entity": "[email protected]", "role": "OWNER" } ]')
+ @mock.patch.object(rust_uprev, 'get_distdir', return_value='/fake/distfiles')
+ @mock.patch.object(subprocess, 'call', return_value=0)
+ def test_fetch_distfile_acl_wrong(self, *_args) -> None:
+ with self.assertRaisesRegex(Exception, 'allUsers.*READER'):
+ with self.assertLogs(level='ERROR') as log:
+ rust_uprev.fetch_distfile_from_mirror('test_distfile.tar.gz')
+ self.assertIn(
+ '[ { "entity": "[email protected]", "role": "OWNER" } ]',
+ '\n'.join(log.output))
+
+
+class FindEbuildPathTest(unittest.TestCase):
+ """Tests for rust_uprev.find_ebuild_path()"""
+
+ def test_exact_version(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ ebuild = Path(tmpdir, 'test-1.3.4.ebuild')
+ ebuild.touch()
+ Path(tmpdir, 'test-1.2.3.ebuild').touch()
+ result = rust_uprev.find_ebuild_path(tmpdir, 'test',
+ rust_uprev.RustVersion(1, 3, 4))
+ self.assertEqual(result, ebuild)
+
+ def test_no_version(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ ebuild = Path(tmpdir, 'test-1.2.3.ebuild')
+ ebuild.touch()
+ result = rust_uprev.find_ebuild_path(tmpdir, 'test')
+ self.assertEqual(result, ebuild)
+
+ def test_patch_version(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ ebuild = Path(tmpdir, 'test-1.3.4-r3.ebuild')
+ ebuild.touch()
+ Path(tmpdir, 'test-1.2.3.ebuild').touch()
+ result = rust_uprev.find_ebuild_path(tmpdir, 'test',
+ rust_uprev.RustVersion(1, 3, 4))
+ self.assertEqual(result, ebuild)
class RustVersionTest(unittest.TestCase):
@@ -49,58 +127,77 @@
"""Tests for prepare_uprev step in rust_uprev"""
def setUp(self):
+ self.bootstrap_version = rust_uprev.RustVersion(1, 1, 0)
self.version_old = rust_uprev.RustVersion(1, 2, 3)
self.version_new = rust_uprev.RustVersion(1, 3, 5)
- @mock.patch.object(
- rust_uprev,
- 'find_ebuild_for_rust_version',
- return_value='/path/to/ebuild')
+ @mock.patch.object(rust_uprev,
+ 'find_ebuild_for_rust_version',
+ return_value='/path/to/ebuild')
+ @mock.patch.object(rust_uprev, 'find_ebuild_path')
@mock.patch.object(rust_uprev, 'get_command_output')
- def test_success_with_template(self, mock_command, mock_find_ebuild):
- expected = (self.version_old, '/path/to/ebuild')
- actual = rust_uprev.prepare_uprev(
- rust_version=self.version_new, template=self.version_old)
+ def test_success_with_template(self, mock_command, mock_find_ebuild,
+ _ebuild_for_version):
+ bootstrap_ebuild_path = Path(
+ '/path/to/rust-bootstrap/',
+ f'rust-bootstrap-{self.bootstrap_version}.ebuild')
+ mock_find_ebuild.return_value = bootstrap_ebuild_path
+ expected = (self.version_old, '/path/to/ebuild', self.bootstrap_version)
+ actual = rust_uprev.prepare_uprev(rust_version=self.version_new,
+ template=self.version_old)
self.assertEqual(expected, actual)
mock_command.assert_not_called()
- @mock.patch.object(
- rust_uprev,
- 'find_ebuild_for_rust_version',
- return_value='/path/to/ebuild')
+ @mock.patch.object(rust_uprev,
+ 'find_ebuild_for_rust_version',
+ return_value='/path/to/ebuild')
+ @mock.patch.object(rust_uprev,
+ 'get_rust_bootstrap_version',
+ return_value=RustVersion(0, 41, 12))
@mock.patch.object(rust_uprev, 'get_command_output')
def test_return_none_with_template_larger_than_input(self, mock_command,
- _mock_find_ebuild):
- ret = rust_uprev.prepare_uprev(
- rust_version=self.version_old, template=self.version_new)
+ *_args):
+ ret = rust_uprev.prepare_uprev(rust_version=self.version_old,
+ template=self.version_new)
self.assertIsNone(ret)
mock_command.assert_not_called()
+ @mock.patch.object(rust_uprev, 'find_ebuild_path')
@mock.patch.object(os.path, 'exists')
@mock.patch.object(rust_uprev, 'get_command_output')
- def test_success_without_template(self, mock_command, mock_exists):
+ def test_success_without_template(self, mock_command, mock_exists,
+ mock_find_ebuild):
rust_ebuild_path = f'/path/to/rust/rust-{self.version_old}-r3.ebuild'
mock_command.return_value = rust_ebuild_path
- expected = (self.version_old, rust_ebuild_path)
- actual = rust_uprev.prepare_uprev(
- rust_version=self.version_new, template=None)
+ bootstrap_ebuild_path = Path(
+ '/path/to/rust-bootstrap',
+ f'rust-bootstrap-{self.bootstrap_version}.ebuild')
+ mock_find_ebuild.return_value = bootstrap_ebuild_path
+ expected = (self.version_old, rust_ebuild_path, self.bootstrap_version)
+ actual = rust_uprev.prepare_uprev(rust_version=self.version_new,
+ template=None)
self.assertEqual(expected, actual)
mock_command.assert_called_once_with(['equery', 'w', 'rust'])
mock_exists.assert_not_called()
+ @mock.patch.object(rust_uprev,
+ 'get_rust_bootstrap_version',
+ return_value=RustVersion(0, 41, 12))
@mock.patch.object(os.path, 'exists')
@mock.patch.object(rust_uprev, 'get_command_output')
def test_return_none_with_ebuild_larger_than_input(self, mock_command,
- mock_exists):
+ mock_exists, *_args):
mock_command.return_value = f'/path/to/rust/rust-{self.version_new}.ebuild'
- ret = rust_uprev.prepare_uprev(rust_version=self.version_old, template=None)
+ ret = rust_uprev.prepare_uprev(rust_version=self.version_old,
+ template=None)
self.assertIsNone(ret)
mock_exists.assert_not_called()
def test_prepare_uprev_from_json(self):
ebuild_path = '/path/to/the/ebuild'
- json_result = (list(self.version_new), ebuild_path)
- expected = (self.version_new, ebuild_path)
+ json_result = (list(self.version_new), ebuild_path,
+ list(self.bootstrap_version))
+ expected = (self.version_new, ebuild_path, self.bootstrap_version)
actual = rust_uprev.prepare_uprev_from_json(json_result)
self.assertEqual(expected, actual)
@@ -108,32 +205,30 @@
class UpdateEbuildTest(unittest.TestCase):
"""Tests for update_ebuild step in rust_uprev"""
ebuild_file_before = """
- STAGE0_DATE="2019-01-01"
- STAGE0_VERSION="any.random.(number)"
- STAGE0_VERSION_CARGO="0.0.0"
+BOOTSTRAP_VERSION="1.2.0"
"""
ebuild_file_after = """
- STAGE0_DATE="2020-01-01"
- STAGE0_VERSION="1.1.1"
- STAGE0_VERSION_CARGO="0.1.0"
+BOOTSTRAP_VERSION="1.3.6"
"""
def test_success(self):
mock_open = mock.mock_open(read_data=self.ebuild_file_before)
+ # ebuild_file and new bootstrap version are deliberately different
ebuild_file = '/path/to/rust/rust-1.3.5.ebuild'
with mock.patch('builtins.open', mock_open):
- rust_uprev.update_ebuild(ebuild_file, ('2020-01-01', '1.1.1', '0.1.0'))
+ rust_uprev.update_ebuild(ebuild_file,
+ rust_uprev.RustVersion.parse('1.3.6'))
mock_open.return_value.__enter__().write.assert_called_once_with(
self.ebuild_file_after)
def test_fail_when_ebuild_misses_a_variable(self):
- ebuild_file = 'STAGE0_DATE="2019-01-01"'
- mock_open = mock.mock_open(read_data=ebuild_file)
+ mock_open = mock.mock_open(read_data='')
ebuild_file = '/path/to/rust/rust-1.3.5.ebuild'
with mock.patch('builtins.open', mock_open):
with self.assertRaises(RuntimeError) as context:
- rust_uprev.update_ebuild(ebuild_file, ('2020-01-01', '1.1.1', '0.1.0'))
- self.assertEqual('STAGE0_VERSION not found in rust ebuild',
+ rust_uprev.update_ebuild(ebuild_file,
+ rust_uprev.RustVersion.parse('1.2.0'))
+ self.assertEqual('BOOTSTRAP_VERSION not found in rust ebuild',
str(context.exception))
@@ -150,44 +245,79 @@
f'RESTRICT="{after}"')
def test_add_mirror_in_ebuild(self):
- self._run_test_flip_mirror(
- before='variable1 variable2',
- after='variable1 variable2 mirror',
- add=True,
- expect_write=True)
+ self._run_test_flip_mirror(before='variable1 variable2',
+ after='variable1 variable2 mirror',
+ add=True,
+ expect_write=True)
def test_remove_mirror_in_ebuild(self):
- self._run_test_flip_mirror(
- before='variable1 variable2 mirror',
- after='variable1 variable2',
- add=False,
- expect_write=True)
+ self._run_test_flip_mirror(before='variable1 variable2 mirror',
+ after='variable1 variable2',
+ add=False,
+ expect_write=True)
def test_add_mirror_when_exists(self):
- self._run_test_flip_mirror(
- before='variable1 variable2 mirror',
- after='variable1 variable2 mirror',
- add=True,
- expect_write=False)
+ self._run_test_flip_mirror(before='variable1 variable2 mirror',
+ after='variable1 variable2 mirror',
+ add=True,
+ expect_write=False)
def test_remove_mirror_when_not_exists(self):
- self._run_test_flip_mirror(
- before='variable1 variable2',
- after='variable1 variable2',
- add=False,
- expect_write=False)
+ self._run_test_flip_mirror(before='variable1 variable2',
+ after='variable1 variable2',
+ add=False,
+ expect_write=False)
@mock.patch.object(rust_uprev, 'flip_mirror_in_ebuild')
- @mock.patch.object(rust_uprev, 'rust_ebuild_actions')
+ @mock.patch.object(rust_uprev, 'ebuild_actions')
def test_update_manifest(self, mock_run, mock_flip):
- ebuild_file = '/path/to/rust/rust-1.1.1.ebuild'
+ ebuild_file = Path('/path/to/rust/rust-1.1.1.ebuild')
rust_uprev.update_manifest(ebuild_file)
- mock_run.assert_called_once_with(['manifest'])
+ mock_run.assert_called_once_with('rust', ['manifest'])
mock_flip.assert_has_calls(
[mock.call(ebuild_file, add=True),
mock.call(ebuild_file, add=False)])
+class UpdateBootstrapEbuildTest(unittest.TestCase):
+ """Tests for rust_uprev.update_bootstrap_ebuild()"""
+
+ def test_update_bootstrap_ebuild(self):
+ # The update should do two things:
+ # 1. Create a copy of rust-bootstrap's ebuild with the new version number.
+ # 2. Add the old PV to RUSTC_RAW_FULL_BOOTSTRAP_SEQUENCE.
+ with tempfile.TemporaryDirectory() as tmpdir_str, \
+ mock.patch.object(rust_uprev, 'find_ebuild_path') as mock_find_ebuild:
+ tmpdir = Path(tmpdir_str)
+ bootstrapdir = Path.joinpath(tmpdir, 'rust-bootstrap')
+ bootstrapdir.mkdir()
+ old_ebuild = bootstrapdir.joinpath('rust-bootstrap-1.45.2.ebuild')
+ old_ebuild.write_text(encoding='utf-8',
+ data="""
+some text
+RUSTC_RAW_FULL_BOOTSTRAP_SEQUENCE=(
+\t1.43.1
+\t1.44.1
+)
+some more text
+""")
+ mock_find_ebuild.return_value = old_ebuild
+ rust_uprev.update_bootstrap_ebuild(rust_uprev.RustVersion(1, 46, 0))
+ new_ebuild = bootstrapdir.joinpath('rust-bootstrap-1.46.0.ebuild')
+ self.assertTrue(new_ebuild.exists())
+ text = new_ebuild.read_text()
+ self.assertEqual(
+ text, """
+some text
+RUSTC_RAW_FULL_BOOTSTRAP_SEQUENCE=(
+\t1.43.1
+\t1.44.1
+\t1.45.2
+)
+some more text
+""")
+
+
class UpdateRustPackagesTests(unittest.TestCase):
"""Tests for update_rust_packages step."""
@@ -223,103 +353,6 @@
package_after)
-class UploadToLocalmirrorTests(unittest.TestCase):
- """Tests for upload_to_localmirror"""
-
- def setUp(self):
- self.tempdir = '/tmp/any/dir'
- self.new_version = rust_uprev.RustVersion(1, 3, 5)
- self.rust_url = 'https://static.rust-lang.org/dist'
- self.tarfile_name = f'rustc-{self.new_version}-src.tar.gz'
- self.rust_src = f'https://static.rust-lang.org/dist/{self.tarfile_name}'
- self.gsurl = f'gs://chromeos-localmirror/distfiles/{self.tarfile_name}'
- self.rust_file = os.path.join(self.tempdir, self.tarfile_name)
- self.sig_file = os.path.join(self.tempdir, 'rustc_sig.asc')
-
- @mock.patch.object(subprocess, 'call', return_value=1)
- @mock.patch.object(subprocess, 'check_call')
- @mock.patch.object(subprocess, 'check_output')
- @mock.patch.object(subprocess, 'run')
- def test_pass_without_retry(self, mock_run, mock_output, mock_call,
- mock_raw_call):
- rust_uprev.upload_single_tarball(self.rust_url, self.tarfile_name,
- self.tempdir)
- mock_output.assert_called_once_with(
- ['gpg', '--verify', self.sig_file, self.rust_file],
- encoding='utf-8',
- stderr=subprocess.STDOUT)
- mock_raw_call.assert_has_calls([
- mock.call(['gsutil', 'ls', self.gsurl],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL)
- ])
- mock_call.assert_has_calls([
- mock.call(['curl', '-f', '-o', self.rust_file, self.rust_src]),
- mock.call(['curl', '-f', '-o', self.sig_file, f'{self.rust_src}.asc']),
- mock.call([
- 'gsutil', 'cp', '-n', '-a', 'public-read', self.rust_file,
- self.gsurl
- ])
- ])
- mock_run.assert_not_called()
-
- @mock.patch.object(subprocess, 'call')
- @mock.patch.object(subprocess, 'check_call')
- @mock.patch.object(subprocess, 'check_output')
- @mock.patch.object(subprocess, 'run')
- @mock.patch.object(rust_uprev, 'get_command_output')
- def test_pass_with_retry(self, mock_output, mock_run, mock_check, mock_call,
- mock_raw_call):
- mock_check.side_effect = subprocess.CalledProcessError(
- returncode=2, cmd=None, output="gpg: Can't check signature")
- mock_output.return_value = 'some_gpg_keys'
- rust_uprev.upload_single_tarball(self.rust_url, self.tarfile_name,
- self.tempdir)
- mock_check.assert_called_once_with(
- ['gpg', '--verify', self.sig_file, self.rust_file],
- encoding='utf-8',
- stderr=subprocess.STDOUT)
- mock_output.assert_called_once_with(
- ['curl', '-f', 'https://keybase.io/rust/pgp_keys.asc'])
- mock_run.assert_called_once_with(['gpg', '--import'],
- input='some_gpg_keys',
- encoding='utf-8',
- check=True)
- mock_raw_call.assert_has_calls([
- mock.call(['gsutil', 'ls', self.gsurl],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL)
- ])
- mock_call.assert_has_calls([
- mock.call(['curl', '-f', '-o', self.rust_file, self.rust_src]),
- mock.call(['curl', '-f', '-o', self.sig_file, f'{self.rust_src}.asc']),
- mock.call(['gpg', '--verify', self.sig_file, self.rust_file]),
- mock.call([
- 'gsutil', 'cp', '-n', '-a', 'public-read', self.rust_file,
- self.gsurl
- ])
- ])
-
- @mock.patch.object(rust_uprev, 'upload_single_tarball')
- def test_upload_to_mirror(self, mock_upload):
- stage0_info = '2020-01-01', '1.1.1', '0.1.0'
- rust_uprev.upload_to_localmirror(self.tempdir, self.new_version,
- stage0_info)
- mock_upload.assert_has_calls([
- mock.call(self.rust_url, f'rustc-{self.new_version}-src.tar.gz',
- self.tempdir),
- mock.call(f'{self.rust_url}/{stage0_info[0]}',
- f'rust-std-{stage0_info[1]}-x86_64-unknown-linux-gnu.tar.gz',
- self.tempdir),
- mock.call(self.rust_url,
- f'rustc-{stage0_info[1]}-x86_64-unknown-linux-gnu.tar.gz',
- self.tempdir),
- mock.call(self.rust_url,
- f'cargo-{stage0_info[2]}-x86_64-unknown-linux-gnu.tar.gz',
- self.tempdir),
- ])
-
-
class RustUprevOtherStagesTests(unittest.TestCase):
"""Tests for other steps in rust_uprev"""
@@ -330,25 +363,6 @@
self.ebuild_file = os.path.join(rust_uprev.RUST_PATH,
'rust-{self.new_version}.ebuild')
- @mock.patch.object(rust_uprev, 'get_command_output')
- def test_parse_stage0_file(self, mock_get):
- stage0_file = """
- unrelated stuff before
- date: 2020-01-01
- rustc: 1.1.1
- cargo: 0.1.0
- unrelated stuff after
- """
- mock_get.return_value = stage0_file
- expected = '2020-01-01', '1.1.1', '0.1.0'
- rust_version = rust_uprev.RustVersion(1, 2, 3)
- actual = rust_uprev.parse_stage0_file(rust_version)
- self.assertEqual(expected, actual)
- mock_get.assert_called_once_with([
- 'curl', '-f', 'https://raw.githubusercontent.com/rust-lang/rust/'
- f'{rust_version}/src/stage0.txt'
- ])
-
@mock.patch.object(shutil, 'copyfile')
@mock.patch.object(os, 'listdir')
@mock.patch.object(subprocess, 'check_call')
@@ -359,7 +373,8 @@
f'rust-{self.current_version}-patch-1.patch',
f'rust-{self.current_version}-patch-2-new.patch'
]
- rust_uprev.copy_patches(self.current_version, self.new_version)
+ rust_uprev.copy_patches(rust_uprev.RUST_PATH, self.current_version,
+ self.new_version)
mock_copy.assert_has_calls([
mock.call(
os.path.join(rust_uprev.RUST_PATH, 'files',
@@ -374,8 +389,8 @@
f'rust-{self.new_version}-patch-2-new.patch'))
])
mock_call.assert_called_once_with(
- ['git', 'add', f'files/rust-{self.new_version}-*.patch'],
- cwd=rust_uprev.RUST_PATH)
+ ['git', 'add', f'rust-{self.new_version}-*.patch'],
+ cwd=rust_uprev.RUST_PATH.joinpath('files'))
@mock.patch.object(shutil, 'copyfile')
@mock.patch.object(subprocess, 'check_call')
@@ -384,23 +399,53 @@
rust_uprev.create_ebuild(template_ebuild, self.new_version)
mock_copy.assert_called_once_with(
template_ebuild,
- os.path.join(rust_uprev.RUST_PATH, f'rust-{self.new_version}.ebuild'))
+ rust_uprev.RUST_PATH.joinpath(f'rust-{self.new_version}.ebuild'))
mock_call.assert_called_once_with(
['git', 'add', f'rust-{self.new_version}.ebuild'],
cwd=rust_uprev.RUST_PATH)
- @mock.patch.object(os.path, 'exists', return_value=True)
+ @mock.patch.object(rust_uprev, 'find_ebuild_for_package')
+ @mock.patch.object(subprocess, 'check_call')
+ def test_remove_rust_bootstrap_version(self, mock_call, *_args):
+ bootstrap_path = os.path.join(rust_uprev.RUST_PATH, '..', 'rust-bootstrap')
+ rust_uprev.remove_rust_bootstrap_version(self.old_version, lambda *x: ())
+ mock_call.has_calls([
+ [
+ 'git', 'rm',
+ os.path.join(bootstrap_path, 'files',
+ f'rust-bootstrap-{self.old_version}-*.patch')
+ ],
+ [
+ 'git', 'rm',
+ os.path.join(bootstrap_path,
+ f'rust-bootstrap-{self.old_version}.ebuild')
+ ],
+ ])
+
+ @mock.patch.object(rust_uprev, 'find_ebuild_path')
+ @mock.patch.object(subprocess, 'check_call')
+ def test_remove_virtual_rust(self, mock_call, mock_find_ebuild):
+ ebuild_path = Path(
+ f'/some/dir/virtual/rust/rust-{self.old_version}.ebuild')
+ mock_find_ebuild.return_value = Path(ebuild_path)
+ rust_uprev.remove_virtual_rust(self.old_version)
+ mock_call.assert_called_once_with(
+ ['git', 'rm', str(ebuild_path.name)], cwd=ebuild_path.parent)
+
+ @mock.patch.object(rust_uprev, 'find_ebuild_path')
@mock.patch.object(shutil, 'copyfile')
@mock.patch.object(subprocess, 'check_call')
- def test_update_virtual_rust(self, mock_call, mock_copy, mock_exists):
- virtual_rust_dir = os.path.join(rust_uprev.RUST_PATH, '../../virtual/rust')
+ def test_update_virtual_rust(self, mock_call, mock_copy, mock_find_ebuild):
+ ebuild_path = Path(
+ f'/some/dir/virtual/rust/rust-{self.current_version}.ebuild')
+ mock_find_ebuild.return_value = Path(ebuild_path)
rust_uprev.update_virtual_rust(self.current_version, self.new_version)
mock_call.assert_called_once_with(
- ['git', 'add', f'rust-{self.new_version}.ebuild'], cwd=virtual_rust_dir)
+ ['git', 'add', f'rust-{self.new_version}.ebuild'],
+ cwd=ebuild_path.parent)
mock_copy.assert_called_once_with(
- os.path.join(virtual_rust_dir, f'rust-{self.current_version}.ebuild'),
- os.path.join(virtual_rust_dir, f'rust-{self.new_version}.ebuild'))
- mock_exists.assert_called_once_with(virtual_rust_dir)
+ ebuild_path.parent.joinpath(f'rust-{self.current_version}.ebuild'),
+ ebuild_path.parent.joinpath(f'rust-{self.new_version}.ebuild'))
@mock.patch.object(os, 'listdir')
def test_find_oldest_rust_version_in_chroot_pass(self, mock_ls):
diff --git a/rust_tools/rust_watch.py b/rust_tools/rust_watch.py
index b9ad7b8..c347d2c 100755
--- a/rust_tools/rust_watch.py
+++ b/rust_tools/rust_watch.py
@@ -9,8 +9,6 @@
Sends an email if something interesting (probably) happened.
"""
-# pylint: disable=cros-logging-import
-
import argparse
import itertools
import json
@@ -21,10 +19,9 @@
import subprocess
import sys
import time
-from typing import Any, Dict, Iterable, List, Optional, Tuple, NamedTuple
+from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Tuple
-from cros_utils import email_sender
-from cros_utils import tiny_render
+from cros_utils import bugs, email_sender, tiny_render
def gentoo_sha_to_link(sha: str) -> str:
@@ -164,7 +161,7 @@
'git',
'log',
'--format=%H %s',
- f'{most_recent_sha}..origin/master',
+ f'{most_recent_sha}..origin/master', # nocheck
'--',
'dev-lang/rust',
],
@@ -225,43 +222,63 @@
temp_file.rename(state_file)
-def maybe_compose_email(old_state: State, newest_release: RustReleaseVersion,
- new_gentoo_commits: List[GitCommit]
- ) -> Optional[Tuple[str, List[tiny_render.Piece]]]:
+def file_bug(title: str, body: str) -> None:
+ """Files a bug against gbiv@ with the given title/body."""
+ bugs.CreateNewBug(
+ bugs.WellKnownComponents.CrOSToolchainPublic,
+ title,
+ body,
+ # To either take or reassign depending on the rotation.
+ assignee='[email protected]',
+ )
+
+
+def maybe_compose_bug(
+ old_state: State,
+ newest_release: RustReleaseVersion,
+) -> Optional[Tuple[str, str]]:
+ """Creates a bug to file about the new release, if doing is desired."""
+ if newest_release == old_state.last_seen_release:
+ return None
+
+ title = f'[Rust] Update to {newest_release}'
+ body = ('A new release has been detected; we should probably roll to it. '
+ "Please see go/crostc-rust-rotation for who's turn it is.")
+ return title, body
+
+
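An illustrative use of maybe_compose_bug(), mirroring the unit tests added in rust_watch_test.py below: a newer release yields a (title, body) pair, while an unchanged release yields None.

state = State(last_seen_release=RustReleaseVersion(1, 0, 0), last_gentoo_sha='')
title, body = maybe_compose_bug(state, RustReleaseVersion(1, 0, 1))
assert title == '[Rust] Update to 1.0.1'
assert maybe_compose_bug(state, RustReleaseVersion(1, 0, 0)) is None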
+def maybe_compose_email(
+ new_gentoo_commits: List[GitCommit]
+) -> Optional[Tuple[str, List[tiny_render.Piece]]]:
"""Creates an email given our new state, if doing so is appropriate."""
+ if not new_gentoo_commits:
+ return None
+
subject_pieces = []
body_pieces = []
- if newest_release > old_state.last_seen_release:
- subject_pieces.append('new rustc release detected')
- body_pieces.append(f'Rustc tag for v{newest_release} was found.')
+ # Separate the sections a bit for prettier output.
+ if body_pieces:
+ body_pieces += [tiny_render.line_break, tiny_render.line_break]
- if new_gentoo_commits:
- # Separate the sections a bit for prettier output.
- if body_pieces:
- body_pieces += [tiny_render.line_break, tiny_render.line_break]
+ if len(new_gentoo_commits) == 1:
+ subject_pieces.append('new rust ebuild commit detected')
+ body_pieces.append('commit:')
+ else:
+ subject_pieces.append('new rust ebuild commits detected')
+ body_pieces.append('commits (newest first):')
- if len(new_gentoo_commits) == 1:
- subject_pieces.append('new rust ebuild commit detected')
- body_pieces.append('commit:')
- else:
- subject_pieces.append('new rust ebuild commits detected')
- body_pieces.append('commits (newest first):')
+ commit_lines = []
+ for commit in new_gentoo_commits:
+ commit_lines.append([
+ tiny_render.Link(
+ gentoo_sha_to_link(commit.sha),
+ commit.sha[:12],
+ ),
+ f': {commit.subject}',
+ ])
- commit_lines = []
- for commit in new_gentoo_commits:
- commit_lines.append([
- tiny_render.Link(
- gentoo_sha_to_link(commit.sha),
- commit.sha[:12],
- ),
- f': {commit.subject}',
- ])
-
- body_pieces.append(tiny_render.UnorderedList(commit_lines))
-
- if not subject_pieces:
- return None
+ body_pieces.append(tiny_render.UnorderedList(commit_lines))
subject = '[rust-watch] ' + '; '.join(subject_pieces)
return subject, body_pieces
@@ -271,11 +288,14 @@
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(
- description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
- parser.add_argument(
- '--state_dir', required=True, help='Directory to store state in.')
- parser.add_argument(
- '--skip_email', action='store_true', help="Don't send an email.")
+ description=__doc__,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument('--state_dir',
+ required=True,
+ help='Directory to store state in.')
+ parser.add_argument('--skip_side_effects',
+ action='store_true',
+ help="Don't send an email or file a bug.")
parser.add_argument(
'--skip_state_update',
action='store_true',
@@ -318,14 +338,25 @@
prior_state.last_gentoo_sha)
logging.info('New commits: %r', new_commits)
- maybe_email = maybe_compose_email(prior_state, most_recent_release,
- new_commits)
+ maybe_bug = maybe_compose_bug(prior_state, most_recent_release)
+ maybe_email = maybe_compose_email(new_commits)
+
+ if maybe_bug is None:
+ logging.info('No bug to file')
+ else:
+ title, body = maybe_bug
+ if opts.skip_side_effects:
+ logging.info('Skipping sending bug with title %r and contents\n%s',
+ title, body)
+ else:
+ logging.info('Writing new bug')
+ file_bug(title, body)
if maybe_email is None:
- logging.info('No updates to send')
+ logging.info('No email to send')
else:
title, body = maybe_email
- if opts.skip_email:
+ if opts.skip_side_effects:
logging.info('Skipping sending email with title %r and contents\n%s',
title, tiny_render.render_html_pieces(body))
else:
@@ -336,8 +367,8 @@
logging.info('Skipping state update, as requested')
return
- newest_sha = (
- new_commits[-1].sha if new_commits else prior_state.last_gentoo_sha)
+ newest_sha = (new_commits[-1].sha
+ if new_commits else prior_state.last_gentoo_sha)
atomically_write_state(
state_file,
State(
diff --git a/rust_tools/rust_watch_test.py b/rust_tools/rust_watch_test.py
index 97d111f..583a912 100755
--- a/rust_tools/rust_watch_test.py
+++ b/rust_tools/rust_watch_test.py
@@ -6,8 +6,6 @@
"""Tests for rust_watch.py."""
-# pylint: disable=cros-logging-import
-
import logging
import pathlib
import subprocess
@@ -15,16 +13,15 @@
import unittest
import unittest.mock
-import rust_watch
from cros_utils import tiny_render
+import rust_watch
+
class Test(unittest.TestCase):
"""Tests."""
-
def _silence_logs(self):
"""Silences all log output until the end of the current test."""
-
def should_log(_record):
return 0
@@ -40,8 +37,8 @@
def test_release_version_json_round_trips(self):
ver = rust_watch.RustReleaseVersion(1, 2, 3)
- self.assertEqual(
- rust_watch.RustReleaseVersion.from_json(ver.to_json()), ver)
+ self.assertEqual(rust_watch.RustReleaseVersion.from_json(ver.to_json()),
+ ver)
def test_state_json_round_trips(self):
state = rust_watch.State(
@@ -98,34 +95,14 @@
rust_watch.GitCommit('abc123', 'newer commit'),
])
- def test_compose_email_on_a_new_release(self):
- new_release = rust_watch.maybe_compose_email(
- old_state=rust_watch.State(
- last_seen_release=rust_watch.RustReleaseVersion(1, 0, 0),
- last_gentoo_sha='',
- ),
- newest_release=rust_watch.RustReleaseVersion(1, 1, 0),
- new_gentoo_commits=[],
- )
-
- self.assertEqual(new_release, ('[rust-watch] new rustc release detected',
- ['Rustc tag for v1.1.0 was found.']))
-
def test_compose_email_on_a_new_gentoo_commit(self):
sha_a = 'a' * 40
- new_commit = rust_watch.maybe_compose_email(
- old_state=rust_watch.State(
- last_seen_release=rust_watch.RustReleaseVersion(1, 0, 0),
- last_gentoo_sha='',
+ new_commit = rust_watch.maybe_compose_email(new_gentoo_commits=[
+ rust_watch.GitCommit(
+ sha=sha_a,
+ subject='summary_a',
),
- newest_release=rust_watch.RustReleaseVersion(1, 0, 0),
- new_gentoo_commits=[
- rust_watch.GitCommit(
- sha=sha_a,
- subject='summary_a',
- ),
- ],
- )
+ ], )
self.assertEqual(new_commit,
('[rust-watch] new rust ebuild commit detected', [
@@ -141,60 +118,48 @@
])
]))
- def test_compose_email_on_multiple_events(self):
- sha_a = 'a' * 40
- new_commit_and_release = rust_watch.maybe_compose_email(
+ def test_compose_email_composes_nothing_when_no_new_updates_exist(self):
+ self.assertIsNone(rust_watch.maybe_compose_email(new_gentoo_commits=()))
+
+ def test_compose_bug_creates_bugs_on_new_versions(self):
+ title, body = rust_watch.maybe_compose_bug(
+ old_state=rust_watch.State(
+ last_seen_release=rust_watch.RustReleaseVersion(1, 0, 0),
+ last_gentoo_sha='',
+ ),
+ newest_release=rust_watch.RustReleaseVersion(1, 0, 1),
+ )
+ self.assertEqual(title, '[Rust] Update to 1.0.1')
+ self.assertTrue(body.startswith('A new release has been detected;'))
+
+ title, body = rust_watch.maybe_compose_bug(
old_state=rust_watch.State(
last_seen_release=rust_watch.RustReleaseVersion(1, 0, 0),
last_gentoo_sha='',
),
newest_release=rust_watch.RustReleaseVersion(1, 1, 0),
- new_gentoo_commits=[
- rust_watch.GitCommit(
- sha=sha_a,
- subject='summary_a',
- ),
- ],
)
+ self.assertEqual(title, '[Rust] Update to 1.1.0')
+ self.assertTrue(body.startswith('A new release has been detected;'))
- self.assertEqual(
- new_commit_and_release,
- ('[rust-watch] new rustc release detected; new rust ebuild commit '
- 'detected', [
- 'Rustc tag for v1.1.0 was found.',
- tiny_render.line_break,
- tiny_render.line_break,
- 'commit:',
- tiny_render.UnorderedList([
- [
- tiny_render.Link(
- rust_watch.gentoo_sha_to_link(sha_a),
- sha_a[:12],
- ),
- ': summary_a',
- ],
- ]),
- ]))
+ title, body = rust_watch.maybe_compose_bug(
+ old_state=rust_watch.State(
+ last_seen_release=rust_watch.RustReleaseVersion(1, 0, 0),
+ last_gentoo_sha='',
+ ),
+ newest_release=rust_watch.RustReleaseVersion(2, 0, 0),
+ )
+ self.assertEqual(title, '[Rust] Update to 2.0.0')
+ self.assertTrue(body.startswith('A new release has been detected;'))
- def test_compose_email_composes_nothing_when_no_new_updates_exist(self):
+ def test_compose_bug_does_nothing_when_no_new_updates_exist(self):
self.assertIsNone(
- rust_watch.maybe_compose_email(
+ rust_watch.maybe_compose_bug(
old_state=rust_watch.State(
last_seen_release=rust_watch.RustReleaseVersion(1, 0, 0),
last_gentoo_sha='',
),
newest_release=rust_watch.RustReleaseVersion(1, 0, 0),
- new_gentoo_commits=[],
- ))
-
- self.assertIsNone(
- rust_watch.maybe_compose_email(
- old_state=rust_watch.State(
- last_seen_release=rust_watch.RustReleaseVersion(1, 1, 0),
- last_gentoo_sha='',
- ),
- newest_release=rust_watch.RustReleaseVersion(1, 0, 0),
- new_gentoo_commits=[],
))
diff --git a/seccomp_tools/mass_seccomp_editor/mass_seccomp_editor.py b/seccomp_tools/mass_seccomp_editor/mass_seccomp_editor.py
new file mode 100755
index 0000000..d8dd762
--- /dev/null
+++ b/seccomp_tools/mass_seccomp_editor/mass_seccomp_editor.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python3
+
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to make mass, CrOS-wide seccomp changes."""
+
+import argparse
+import re
+import subprocess
+import sys
+import shutil
+from typing import Any, Iterable, Optional
+from dataclasses import dataclass, field
+
+# Pre-compiled regexes.
+AMD64_RE = re.compile(r'.*(amd|x86_)64.*\.policy')
+X86_RE = re.compile(r'.*x86.*\.policy')
+AARCH64_RE = re.compile(r'.*a(arch|rm)64.*\.policy')
+ARM_RE = re.compile(r'.*arm(v7)?.*\.policy')
+
+
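To illustrate how the pre-compiled regexes classify policy file names (main() checks AMD64_RE before X86_RE because the x86 pattern also matches x86_64 names); the file names are made up:

assert AMD64_RE.match('chrome-x86_64.policy')
assert X86_RE.match('chrome-x86.policy')
assert AARCH64_RE.match('chrome-arm64.policy')
assert ARM_RE.match('chrome-armv7.policy')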
+@dataclass(frozen=True)
+class Policies:
+ """Dataclass to hold lists of policies which match certain types."""
+ arm: list[str] = field(default_factory=list)
+ x86_64: list[str] = field(default_factory=list)
+ x86: list[str] = field(default_factory=list)
+ arm64: list[str] = field(default_factory=list)
+ none: list[str] = field(default_factory=list)
+
+ def to_dict(self) -> dict[str, list[str]]:
+ """Convert this class to a dictionary."""
+ return {**self.__dict__}
+
+
+def main():
+ """Run the program from cmd line"""
+ args = parse_args()
+ if all(x is None for x in [args.all, args.b64, args.b32, args.none]):
+ print('At least one of {--all, --b64, --b32, --none} is required',
+ file=sys.stderr)
+ sys.exit(1)
+ matches, success = find_potential_policy_files(args.packages)
+
+ separated = Policies()
+
+ for m in matches:
+ if AMD64_RE.match(m):
+ separated.x86_64.append(m)
+ continue
+ if X86_RE.match(m):
+ separated.x86.append(m)
+ continue
+ if AARCH64_RE.match(m):
+ separated.arm64.append(m)
+ continue
+ if ARM_RE.match(m):
+ separated.arm.append(m)
+ continue
+ separated.none.append(m)
+
+ syscall_lookup_table = _make_syscall_lookup_table(args)
+
+ for (type_, val) in separated.to_dict().items():
+ for fp in val:
+ syscalls = syscall_lookup_table[type_]
+ missing = check_missing_syscalls(syscalls, fp)
+ if missing is None:
+ print(f'E ({type_}) {fp}')
+ elif len(missing) == 0:
+ print(f'_ ({type_}) {fp}')
+ else:
+ missing_str = ','.join(missing)
+ print(f'M ({type_}) {fp} :: {missing_str}')
+
+ if not args.edit:
+ sys.exit(0 if success else 2)
+
+ for (type_, val) in separated.to_dict().items():
+ for fp in val:
+ syscalls = syscall_lookup_table[type_]
+ if args.force:
+ _confirm_add(fp, syscalls, args.yes)
+ continue
+ missing = check_missing_syscalls(syscalls, fp)
+ if missing is None or len(missing) == 0:
+ print(f'Already good for {fp} ({type_})')
+ else:
+ _confirm_add(fp, missing, args.yes)
+
+ sys.exit(0 if success else 2)
+
+
+def _make_syscall_lookup_table(args: Any) -> dict[str, list[str]]:
+ """Make lookup table, segmented by all/b32/b64/none policies.
+
+ Args:
+ args: Direct output from parse_args.
+
+ Returns:
+ dict of syscalls we want to search for in each policy file,
+ where the key is the policy file arch, and the value is
+ a list of syscalls as strings.
+ """
+ syscall_lookup_table = Policies().to_dict()
+ if args.all:
+ split_syscalls = [x.strip() for x in args.all.split(',')]
+ for v in syscall_lookup_table.values():
+ v.extend(split_syscalls)
+ if args.b32:
+ split_syscalls = [x.strip() for x in args.b32.split(',')]
+ syscall_lookup_table['x86'].extend(split_syscalls)
+ syscall_lookup_table['arm'].extend(split_syscalls)
+ if args.b64:
+ split_syscalls = [x.strip() for x in args.b64.split(',')]
+ syscall_lookup_table['x86_64'].extend(split_syscalls)
+ syscall_lookup_table['arm64'].extend(split_syscalls)
+ if args.none:
+ split_syscalls = [x.strip() for x in args.none.split(',')]
+ syscall_lookup_table['none'].extend(split_syscalls)
+ return syscall_lookup_table
+
+
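A short illustration of the lookup table built above; the argparse.Namespace stands in for parse_args() output and is hypothetical:

args = argparse.Namespace(all='fstatfs', b32='fstatfs64', b64=None, none=None)
table = _make_syscall_lookup_table(args)
assert table['x86'] == ['fstatfs', 'fstatfs64']  # 32-bit arches get both.
assert table['x86_64'] == ['fstatfs']            # 64-bit arches get --all only.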
+def _confirm_add(fp: str, syscalls: Iterable[str], noninteractive=None):
+ """Interactive confirmation check you wish to add a syscall.
+
+ Args:
+ fp: filepath of the file to edit.
+ syscalls: list-like of syscalls to append to the file.
+ noninteractive: Just add the syscalls without asking.
+ """
+ if noninteractive:
+ _update_seccomp(fp, list(syscalls))
+ return
+ syscalls_str = ','.join(syscalls)
+ user_input = input(f'Add {syscalls_str} for {fp}? [y/N]> ')
+ if user_input.lower().startswith('y'):
+ _update_seccomp(fp, list(syscalls))
+ print('Edited!')
+ else:
+ print(f'Skipping {fp}')
+
+
+def check_missing_syscalls(syscalls: list[str], fp: str) -> Optional[set[str]]:
+ """Return which specified syscalls are missing in the given file."""
+ missing_syscalls = set(syscalls)
+ with open(fp) as f:
+ try:
+ lines = f.readlines()
+ for syscall in syscalls:
+ for line in lines:
+ if re.match(syscall + r':\s*1', line):
+ missing_syscalls.remove(syscall)
+ except UnicodeDecodeError:
+ return None
+ return missing_syscalls
+
+
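An illustrative call to check_missing_syscalls(), in the spirit of the unit test added below; syscalls already listed with ': 1' are dropped from the returned set:

# Assuming 'foo.policy' exists and contains the line 'fstat: 1',
# only 'dup' is reported as missing.
missing = check_missing_syscalls(['fstat', 'dup'], 'foo.policy')
assert missing == {'dup'}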
+def _update_seccomp(fp: str, missing_syscalls: list[str]):
+ """Update the seccomp of the file based on the seccomp change type."""
+ with open(fp, 'a') as f:
+ sorted_syscalls = sorted(missing_syscalls)
+ for to_write in sorted_syscalls:
+ f.write(to_write + ': 1\n')
+
+
+def _search_cmd(query: str, use_fd=True) -> list[str]:
+ if use_fd and shutil.which('fdfind') is not None:
+ return [
+ 'fdfind',
+ '-t',
+ 'f',
+ '--full-path',
+ f'^.*{query}.*\\.policy$',
+ ]
+ return [
+ 'find',
+ '.',
+ '-regex',
+ f'^.*{query}.*\\.policy$',
+ '-type',
+ 'f',
+ ]
+
+
+def find_potential_policy_files(packages: list[str]) -> tuple[list[str], bool]:
+ """Find potentially related policy files to the given packages.
+
+ Returns:
+ (policy_files, successful): A list of policy file paths, and a boolean
+ indicating whether all queries were successful in finding at least
+ one related policy file.
+ """
+ all_queries_succeeded = True
+ matches = []
+ for p in packages:
+ # It's quite common that hyphens are translated to underscores
+ # and similarly common that underscores are translated to hyphens.
+ # We make them agnostic here.
+ hyphen_agnostic = re.sub(r'[-_]', '[-_]', p)
+ cmd = subprocess.run(
+ _search_cmd(hyphen_agnostic),
+ stdout=subprocess.PIPE,
+ check=True,
+ )
+ new_matches = [a for a in cmd.stdout.decode('utf-8').split('\n') if a]
+ if not new_matches:
+ print(f'WARNING: No matches found for {p}', file=sys.stderr)
+ all_queries_succeeded = False
+ else:
+ matches.extend(new_matches)
+ return matches, all_queries_succeeded
+
+
+def parse_args() -> Any:
+ """Handle command line arguments."""
+ parser = argparse.ArgumentParser(
+ description='Check for missing syscalls in'
+ ' seccomp policy files, or make'
+ ' mass seccomp changes.\n\n'
+ 'The format of this output follows the template:\n'
+ ' status (arch) local/policy/filepath :: syscall,syscall,syscall\n'
+ 'Where the status can be "_" for present, "M" for missing,'
+ ' or "E" for Error\n\n'
+ 'Example:\n'
+ ' mass_seccomp_editor.py --all fstatfs --b32 fstatfs64'
+ ' modemmanager\n\n'
+ 'Exit Codes:\n'
+ " '0' for successfully found specific policy files\n"
+ " '1' for python-related error.\n"
+ " '2' for no matched policy files for a given query.",
+ formatter_class=argparse.RawTextHelpFormatter,
+ )
+ parser.add_argument('packages', nargs='+')
+ parser.add_argument(
+ '--all',
+ type=str,
+ metavar='syscalls',
+ help='comma separated syscalls to check in all policy files')
+ parser.add_argument(
+ '--b64',
+ type=str,
+ metavar='syscalls',
+ help='Comma separated syscalls to check in 64bit architectures')
+ parser.add_argument(
+ '--b32',
+ type=str,
+ metavar='syscalls',
+ help='Comma separated syscalls to check in 32bit architectures')
+ parser.add_argument(
+ '--none',
+ type=str,
+ metavar='syscalls',
+ help='Comma separated syscalls to check in unknown architectures')
+ parser.add_argument('--edit',
+ action='store_true',
+ help='Make changes to the listed files,'
+ ' rather than just printing out what is missing')
+ parser.add_argument('-y',
+ '--yes',
+ action='store_true',
+ help='Say "Y" to all interactive checks')
+ parser.add_argument('--force',
+ action='store_true',
+ help='Edit all files, regardless of missing status.'
+ ' Does nothing without --edit.')
+ return parser.parse_args()
+
+
+if __name__ == '__main__':
+ main()
diff --git a/seccomp_tools/mass_seccomp_editor/test_mass_seccomp_editor.py b/seccomp_tools/mass_seccomp_editor/test_mass_seccomp_editor.py
new file mode 100755
index 0000000..3e7aa4c
--- /dev/null
+++ b/seccomp_tools/mass_seccomp_editor/test_mass_seccomp_editor.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for mass_seccomp_editor.py"""
+
+import unittest
+from unittest import mock
+
+import mass_seccomp_editor
+
+BASE_SECCOMP_CONTENTS = """
+fstat: 1
+poll: 1
+foobar: 1
+"""
+
+TEST_FP = 'foo'
+
+
+class TestMassSeccompEditor(unittest.TestCase):
+ """Test the mass_seccomp_editor."""
+
+ def test_check_missing_sycalls(self):
+ """Test we can find missing syscalls."""
+ with mock.patch('builtins.open',
+ mock.mock_open(read_data=BASE_SECCOMP_CONTENTS)):
+ out = mass_seccomp_editor.check_missing_syscalls(
+ ['fstat', 'dup', 'fizzbuzz'], TEST_FP)
+ self.assertEqual(out, set(['dup', 'fizzbuzz']))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/setup_chromeos.py b/setup_chromeos.py
deleted file mode 100755
index 07c7530..0000000
--- a/setup_chromeos.py
+++ /dev/null
@@ -1,252 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright 2010 The Chromium OS Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""Script to checkout the ChromeOS source.
-
-This script sets up the ChromeOS source in the given directory, matching a
-particular release of ChromeOS.
-"""
-
-from __future__ import print_function
-
-__author__ = '[email protected] (Raymes Khoury)'
-
-from datetime import datetime
-
-import argparse
-import os
-import pickle
-import sys
-import tempfile
-import time
-from cros_utils import command_executer
-from cros_utils import logger
-from cros_utils import manifest_versions
-
-GCLIENT_FILE = """solutions = [
- { "name" : "CHROME_DEPS",
- "url" :
- "svn://svn.chromium.org/chrome-internal/trunk/tools/buildspec/releases/%s",
- "custom_deps" : {
- "src/third_party/WebKit/LayoutTests": None,
- "src-pdf": None,
- "src/pdf": None,
- },
- "safesync_url": "",
- },
-]
-"""
-
-# List of stable versions used for common team image
-# Sheriff must update this list when a new common version becomes available
-COMMON_VERSIONS = '/home/mobiletc-prebuild/common_images/common_list.txt'
-
-
-def Usage(parser):
- parser.print_help()
- sys.exit(0)
-
-
-# Get version spec file, either from "paladin" or "buildspec" directory.
-def GetVersionSpecFile(version, versions_git):
- temp = tempfile.mkdtemp()
- commands = ['cd {0}'.format(temp), \
- 'git clone {0} versions'.format(versions_git)]
- cmd_executer = command_executer.GetCommandExecuter()
- ret = cmd_executer.RunCommands(commands)
- err_msg = None
- if ret:
- err_msg = 'Failed to checkout versions_git - {0}'.format(versions_git)
- ret = None
- else:
- v, m = version.split('.', 1)
- paladin_spec = 'paladin/buildspecs/{0}/{1}.xml'.format(v, m)
- generic_spec = 'buildspecs/{0}/{1}.xml'.format(v, m)
- paladin_path = '{0}/versions/{1}'.format(temp, paladin_spec)
- generic_path = '{0}/versions/{1}'.format(temp, generic_spec)
- if os.path.exists(paladin_path):
- ret = paladin_spec
- elif os.path.exists(generic_path):
- ret = generic_spec
- else:
- err_msg = 'No spec found for version {0}'.format(version)
- ret = None
- # Fall through to clean up.
-
- commands = ['rm -rf {0}'.format(temp)]
- cmd_executer.RunCommands(commands)
- if err_msg:
- logger.GetLogger().LogFatal(err_msg)
- return ret
-
-
-def TimeToCommonVersion(timestamp):
- """Convert timestamp to common image version."""
- tdt = datetime.fromtimestamp(float(timestamp))
- with open(COMMON_VERSIONS, 'r', encoding='utf-8') as f:
- common_list = pickle.load(f)
- for sv in common_list:
- sdt = datetime.strptime(sv['date'], '%Y-%m-%d %H:%M:%S.%f')
- if tdt >= sdt:
- return '%s.%s' % (sv['chrome_major_version'], sv['chromeos_version'])
- # should never reach here
- logger.GetLogger().LogFatal('No common version for timestamp')
- return None
-
-
-def Main(argv):
- """Checkout the ChromeOS source."""
- parser = argparse.ArgumentParser()
- parser.add_argument(
- '--dir',
- dest='directory',
- help='Target directory for ChromeOS installation.')
- parser.add_argument(
- '--version',
- dest='version',
- default='latest_lkgm',
- help="""ChromeOS version. Can be:
-(1) A release version in the format: 'X.X.X.X'
-(2) 'top' for top of trunk
-(3) 'latest_lkgm' for the latest lkgm version
-(4) 'lkgm' for the lkgm release before timestamp
-(5) 'latest_common' for the latest team common stable version
-(6) 'common' for the team common stable version before timestamp
-Default is 'latest_lkgm'.""")
- parser.add_argument(
- '--timestamp',
- dest='timestamp',
- default=None,
- help='Timestamps in epoch format. It will check out the'
- 'latest LKGM or the latest COMMON version of ChromeOS'
- ' before the timestamp. Use in combination with'
- ' --version=latest or --version=common. Use '
- '"date -d <date string> +%s" to find epoch time')
- parser.add_argument(
- '--minilayout',
- dest='minilayout',
- default=False,
- action='store_true',
- help='Whether to checkout the minilayout (smaller '
- 'checkout).')
- parser.add_argument(
- '--jobs', '-j', dest='jobs', help='Number of repo sync threads to use.')
- parser.add_argument(
- '--public',
- '-p',
- dest='public',
- default=False,
- action='store_true',
- help='Use the public checkout instead of the private '
- 'one.')
-
- options = parser.parse_args(argv)
-
- if not options.version:
- parser.print_help()
- logger.GetLogger().LogFatal('No version specified.')
- else:
- version = options.version.strip()
-
- if not options.timestamp:
- timestamp = ''
- else:
- timestamp = options.timestamp.strip()
- if version not in ('lkgm', 'common'):
- parser.print_help()
- logger.GetLogger().LogFatal('timestamp option only applies for '
- 'versions "lkgm" or "common"')
-
- if not options.directory:
- parser.print_help()
- logger.GetLogger().LogFatal('No directory specified.')
-
- directory = options.directory.strip()
-
- if options.public:
- manifest_repo = 'https://chromium.googlesource.com/chromiumos/manifest.git'
- versions_repo = ('https://chromium.googlesource.com/'
- 'chromiumos/manifest-versions.git')
- else:
- manifest_repo = ('https://chrome-internal.googlesource.com/chromeos/'
- 'manifest-internal.git')
- versions_repo = ('https://chrome-internal.googlesource.com/chromeos/'
- 'manifest-versions.git')
-
- if version == 'top':
- init = 'repo init -u %s' % manifest_repo
- elif version == 'latest_lkgm':
- manifests = manifest_versions.ManifestVersions()
- version = manifests.TimeToVersionChromeOS(time.mktime(time.gmtime()))
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput(
- 'found version %s.%s for latest LKGM' % (version, manifest))
- init = ('repo init -u %s -m paladin/buildspecs/%s/%s.xml' %
- (versions_repo, version, manifest))
- del manifests
- elif version == 'lkgm':
- if not timestamp:
- parser.print_help()
- logger.GetLogger().LogFatal('No timestamp specified for version=lkgm')
- manifests = manifest_versions.ManifestVersions()
- version = manifests.TimeToVersion(timestamp)
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput('found version %s.%s for LKGM at timestamp %s'
- % (version, manifest, timestamp))
- init = ('repo init -u %s -m paladin/buildspecs/%s/%s.xml' %
- (versions_repo, version, manifest))
- del manifests
- elif version == 'latest_common':
- version = TimeToCommonVersion(time.mktime(time.gmtime()))
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput(
- 'found version %s.%s for latest Common image' % (version, manifest))
- init = ('repo init -u %s -m buildspecs/%s/%s.xml' % (versions_repo, version,
- manifest))
- elif version == 'common':
- if not timestamp:
- parser.print_help()
- logger.GetLogger().LogFatal('No timestamp specified for version=lkgm')
- version = TimeToCommonVersion(timestamp)
- version, manifest = version.split('.', 1)
- logger.GetLogger().LogOutput(
- 'found version %s.%s for latest common image '
- 'at timestamp %s' % (version, manifest, timestamp))
- init = ('repo init -u %s -m buildspecs/%s/%s.xml' % (versions_repo, version,
- manifest))
- else:
- # user specified a specific version number
- version_spec_file = GetVersionSpecFile(version, versions_repo)
- if not version_spec_file:
- return 1
- init = 'repo init -u %s -m %s' % (versions_repo, version_spec_file)
-
- if options.minilayout:
- init += ' -g minilayout'
-
- init += ' --repo-url=https://chromium.googlesource.com/external/repo.git'
-
- # crosbug#31837 - "Sources need to be world-readable to properly
- # function inside the chroot"
- sync = 'umask 022 && repo sync'
- if options.jobs:
- sync += ' -j %s' % options.jobs
-
- commands = ['mkdir -p %s' % directory, 'cd %s' % directory, init, sync]
- cmd_executer = command_executer.GetCommandExecuter()
- ret = cmd_executer.RunCommands(commands)
- if ret:
- return ret
-
- return cmd_executer.RunCommand(
- 'git ls-remote '
- 'https://chrome-internal.googlesource.com/chrome/src-internal.git '
- 'refs/HEAD > /dev/null')
-
-
-if __name__ == '__main__':
- retval = Main(sys.argv[1:])
- sys.exit(retval)
diff --git a/toolchain_utils_githooks/check-presubmit.py b/toolchain_utils_githooks/check-presubmit.py
index 274a75f..99500ac 100755
--- a/toolchain_utils_githooks/check-presubmit.py
+++ b/toolchain_utils_githooks/check-presubmit.py
@@ -244,7 +244,7 @@
# pylint+golint.
def try_run_cros_lint(cros_binary: str) -> t.Optional[CheckResult]:
exit_code, output = run_command_unchecked(
- [cros_binary, 'lint', '--py3', '--'] + files,
+ [cros_binary, 'lint', '--'] + files,
toolchain_utils_root,
env=fixed_env)
diff --git a/unblocked_terms.txt b/unblocked_terms.txt
deleted file mode 100644
index c961e66..0000000
--- a/unblocked_terms.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-# Code uses this in many cases to refer to git branches.
-master
-
-# A few paths in AOSP contain this term.
-native
diff --git a/upstream_workon/upstream_workon.bash b/upstream_workon/upstream_workon.bash
new file mode 100755
index 0000000..f066bbe
--- /dev/null
+++ b/upstream_workon/upstream_workon.bash
@@ -0,0 +1,273 @@
+#!/bin/bash -eu
+#
+# Copyright 2021 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+USAGE=\
+'Usage: upstream-workon [-h]
+ init <PACKAGE> <DEV_WORK_DIR>
+ link <PACKAGE> <DEV_WORK_DIR>
+ build <PACKAGE>
+ install <PACKAGE>
+ clean <PACKAGE>
+ help'
+
+set +e
+read -r -d '' HELP <<'EOF'
+Usage: upstream-workon [-h]
+ init <PACKAGE> <DEV_WORK_DIR>
+ link <PACKAGE> <DEV_WORK_DIR>
+ build <PACKAGE>
+ install <PACKAGE>
+ clean <PACKAGE>
+ help
+
+Flags:
+ -h --help Print this help message
+
+Commands:
+  init       Initialize a developer workdir from a freshly unpacked source tree
+  link       Link an existing developer source dir to the portage workdir
+ build Build the package using ebuild ... compile
+ install Install the package using ebuild ... install
+ clean Clean up your work without deleting the developer workdir
+ help Print this help message
+
+Examples:
+
+ # Start work
+ mkdir "$HOME/llvm"
+ upstream-workon init sys-devel/llvm "$HOME/llvm"
+
+ # Link your existing work
+ upstream-workon link sys-devel/llvm "$HOME/llvm"
+
+ # Compile your work
+ upstream-workon build sys-devel/llvm
+
+ # Install your changes to the chroot
+ upstream-workon install sys-devel/llvm
+
+ # Clean up
+ upstream-workon clean sys-devel/llvm
+
+EOF
+set -e
+
+incorrect_number_of_arguments() {
+ echo 'ERROR: Please use correct command syntax' >&2
+ echo "${USAGE}" >&2
+ exit 1
+}
+
+print_experimental_warning() {
+ echo >&2
+ echo '!!! WARNING: This tool is EXPERIMENTAL--please do not rely on the API.' >&2
+  echo '!!! WARNING: Please suggest new features for Version 2; this' >&2
+ echo '!!! WARNING: implementation will not be actively developed and' >&2
+ echo '!!! WARNING: exists only to receive feedback and minor fixes.' >&2
+}
+
+# ------------------------------------------------------------------------------
+# Actual logic
+# ------------------------------------------------------------------------------
+
+# We could probably pass "USE" through directly, but capturing it in a
+# separate variable gives us more flexibility in the future.
+USE_FLAGS="${USE:-}"
+
+if [[ -n "${USE_FLAGS}" ]]; then
+ echo 'USE flags set to:'
+ echo " ${USE_FLAGS}"
+fi
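+# Illustrative only: USE flags exported in the caller's environment are
+# forwarded to every ebuild phase below, e.g.
+#   USE="debug" upstream-workon build sys-devel/llvm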
+
+init() {
+ local package="$1"
+ local desired_src_loc="$2"
+ local ebuild_loc
+ ebuild_loc="$(resolve_ebuild_for "${package}")"
+
+ local ebuild_name
+ ebuild_name="$(basename "${ebuild_loc}" | sed 's/\.ebuild$//g')"
+ local package_name
+  # ShellCheck SC2001 suggests using parameter expansion instead of sed,
+  # but parameter expansion cannot easily express the regex needed for
+  # this replacement.
+ # shellcheck disable=2001
+ package_name="$(sed 's/-r[0-9]\+$//g' <<< "${ebuild_name}")"
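+  # e.g. "llvm-12.0_pre404-r4" -> "llvm-12.0_pre404" (illustrative version).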
+ local ebuild_category
+ ebuild_category="$(basename "$(dirname "$(dirname "${ebuild_loc}")")")"
+ local portage_dir='/var/tmp/portage'
+
+ local work_dir="${portage_dir}/${ebuild_category}/${ebuild_name}/work/${package_name}"
+
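+  # "ebuild ... clean" removes any stale portage work dir for this ebuild;
+  # "ebuild ... unpack" fetches and extracts the package sources.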
+ ebuild "${ebuild_loc}" clean
+ USE="${USE_FLAGS}" ebuild "${ebuild_loc}" unpack
+
+  # Initialize a git repo if one doesn't already exist.
+  # We could probably use git -C instead of the pushd/popd.
+ pushd "${work_dir}" >& /dev/null
+ if [[ ! -d '.git' ]]; then
+ git init
+ git add .
+ git commit -m 'Initial commit'
+ fi
+ popd >& /dev/null
+
+ USE="${USE_FLAGS}" ebuild "${ebuild_loc}" configure
+
+ cp -r -p "${work_dir}/." "${desired_src_loc}"
+ local backup_dir="${work_dir}.bk"
+ mv "${work_dir}" "${backup_dir}"
+ ln -s "$(realpath "${desired_src_loc}")" "${work_dir}"
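+  # Later ebuild phases resolve ${work_dir} through this symlink, so they
+  # operate directly on the developer checkout.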
+
+ pushd "${desired_src_loc}" >& /dev/null
+
+ git add .
+ git commit -m 'Ebuild configure commit'
+ popd >& /dev/null
+
+ echo
+ echo '----------------------------------------'
+ echo 'Successfully created local mirror!'
+ echo "Developer work directory set up at: ${desired_src_loc}"
+ echo 'To build the package, run:'
+ echo " upstream-workon build ${package}"
+ echo 'To install the package, run:'
+ echo " sudo upstream-workon install ${package}"
+ echo "To clean up (without deleting ${desired_src_loc}), run:"
+ echo " upstream-workon clean ${package}"
+ echo "WARNING: Moving original workdir to ${backup_dir}, consider deleting" >&2
+}
+
+clean() {
+ local package="$1"
+ echo 'WARNING: You may need to run this with sudo' >&2
+ local ebuild_loc
+ ebuild_loc="$(resolve_ebuild_for "${package}")"
+
+ ebuild "${ebuild_loc}" clean
+
+ echo '----------------------------------------'
+ echo "Successfully cleaned up ${package}!"
+}
+
+
+compile() {
+ local package="$1"
+ local ebuild_loc
+ ebuild_loc="$(resolve_ebuild_for "${package}")"
+
+ USE="${USE_FLAGS}" ebuild "${ebuild_loc}" compile
+
+ echo '----------------------------------------'
+ echo "Successfully compiled ${package}!"
+}
+
+
+install_src() {
+ local package="$1"
+ echo 'WARNING: You may need to run this with sudo' >&2
+ local ebuild_loc
+ ebuild_loc="$(resolve_ebuild_for "${package}")"
+
+ USE="${USE_FLAGS}" ebuild "${ebuild_loc}" install
+
+ echo '----------------------------------------'
+ echo "Successfully installed ${package}!"
+}
+
+link_src() {
+ local package="$1"
+ local desired_src_loc="$2"
+ local ebuild_loc
+ ebuild_loc="$(resolve_ebuild_for "${package}")"
+
+ local ebuild_name
+ ebuild_name="$(basename "${ebuild_loc}" | sed 's/\.ebuild$//g')"
+ local package_name
+ # shellcheck disable=2001
+ package_name="$(sed 's/-r[0-9]\+$//g' <<< "${ebuild_name}")"
+ local ebuild_category
+ ebuild_category="$(basename "$(dirname "$(dirname "${ebuild_loc}")")")"
+ local portage_dir='/var/tmp/portage'
+
+ local work_dir="${portage_dir}/${ebuild_category}/${ebuild_name}/work/${package_name}"
+
+ local backup_dir="${work_dir}.bk"
+
+  # Because of some annoying permissions issues, we have to configure directly
+  # in /var/tmp/portage/... and then copy those changes into the local source
+  # directory. To avoid leaving stale files behind, we recreate the local
+  # source directory, keeping only its .git directory.
+
+ ebuild "${ebuild_loc}" clean
+ USE="${USE_FLAGS}" ebuild "${ebuild_loc}" configure
+  # TODO(ajordanr): This is a rough edge; I don't want users accidentally
+  # deleting their home directory, so we back up the old contents instead of
+  # deleting them in place.
+  # TODO(ajordanr): This ignores git submodules, which I don't want.
+ mv "${desired_src_loc}" "${desired_src_loc}.bk"
+ mkdir "${desired_src_loc}"
+ cp -rP "${desired_src_loc}.bk/.git" "${desired_src_loc}/.git"
+ rsync -a --exclude=".git" "${work_dir}"/* "${desired_src_loc}"
+ rsync -a --exclude=".git" "${work_dir}"/.[^.]* "${desired_src_loc}"
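+  # The .[^.]* glob copies hidden files while skipping the "." and ".." entries.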
+ mv "${work_dir}" "${backup_dir}"
+ ln -s "$(realpath "${desired_src_loc}")" "${work_dir}"
+
+ echo '----------------------------------------'
+ echo 'Successfully linked to local mirror!'
+ echo "Developer work directory linked to: ${desired_src_loc}"
+ echo "WARNING: Moving original workdir to ${backup_dir}, consider deleting" >&2
+ echo "WARNING: Moving original dev dir to ${desired_src_loc}.bk, consider deleting" >&2
+}
+
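+# Resolve a package atom to the ebuild file portage would use, via
+# gentoolkit's "equery which" (abbreviated "equery w").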
+resolve_ebuild_for() {
+ equery w "$1"
+}
+
+CMD="${1:-}"
+
+case "${CMD}" in
+ -h|--help|help)
+ shift
+ echo "${HELP}"
+ print_experimental_warning
+ exit 1
+ ;;
+ init)
+ shift
+ [[ -z "${1:-}" || -z "${2:-}" ]] && incorrect_number_of_arguments
+ print_experimental_warning
+ init "$1" "$2"
+ ;;
+ link)
+ shift
+ [[ -z "${1:-}" || -z "${2:-}" ]] && incorrect_number_of_arguments
+ print_experimental_warning
+ link_src "$1" "$2"
+ ;;
+ build)
+ shift
+ [[ -z "${1:-}" ]] && incorrect_number_of_arguments
+ print_experimental_warning
+ compile "$1"
+ ;;
+ clean)
+ shift
+ [[ -z "${1:-}" ]] && incorrect_number_of_arguments
+ print_experimental_warning
+ clean "$1"
+ ;;
+ install)
+ shift
+ [[ -z "${1:-}" ]] && incorrect_number_of_arguments
+ print_experimental_warning
+ install_src "$1"
+ ;;
+ *)
+ incorrect_number_of_arguments
+ ;;
+esac