Skip to content

Commit

Permalink
merge actions changes to 23-3 (#246)
Browse files Browse the repository at this point in the history
  • Loading branch information
librarian authored Jan 25, 2024
1 parent 77a89c8 commit 1a37a18
Show file tree
Hide file tree
Showing 29 changed files with 920 additions and 152 deletions.
20 changes: 14 additions & 6 deletions .github/actions/build/action.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ inputs:
required: true
default: "relwithdebinfo"
description: "relwithdebinfo, release-asan, release-tsan"
build_cache_update:
cache_update:
required: true
description: "bazel-remote upload results"
default: "false"
Expand All @@ -34,9 +34,9 @@ runs:
shell: bash
run: |
echo "SHELLOPTS=xtrace" >> $GITHUB_ENV
export TMP_DIR=$(pwd)/tmp_build
export TMP_DIR=/home/github/tmp_build
echo "TMP_DIR=$TMP_DIR" >> $GITHUB_ENV
rm -rf $TMP_DIR && mkdir $TMP_DIR
rm -rf $TMP_DIR && mkdir $TMP_DIR && chown -R github:github $TMP_DIR $GITHUB_WORKSPACE
- name: build
shell: bash
Expand All @@ -61,7 +61,7 @@ runs:
extra_params+=(--add-result .o)
fi
if [ "${{ inputs.build_cache_update }}" == "true" ]; then
if [ "${{ inputs.cache_update }}" == "true" ]; then
extra_params+=(--bazel-remote-put --dist-cache-evict-bins)
fi
Expand All @@ -88,7 +88,7 @@ runs:
;;
esac
./ya make -k --build "${build_type}" --force-build-depends -D'BUILD_LANGUAGES=CPP PY3 PY2 GO' -T --stat \
sudo -E -H -u github ./ya make -k --build "${build_type}" --force-build-depends -D'BUILD_LANGUAGES=CPP PY3 PY2 GO' -T --stat \
--log-file "$TMP_DIR/ya_log.txt" --evlog-file "$TMP_DIR/ya_evlog.jsonl" \
--dump-graph --dump-graph-to-file "$TMP_DIR/ya_graph.json" \
--cache-size 512G --link-threads "${{ inputs.link_threads }}" \
Expand All @@ -99,7 +99,15 @@ runs:
shell: bash
run: |
echo "::group::s3-sync"
s3cmd sync --acl-private --no-progress --stats --no-check-md5 "$TMP_DIR/" "$S3_BUCKET_PATH/build_logs/"
sudo -E -H -u github s3cmd sync --acl-private --no-progress --stats --no-check-md5 "$TMP_DIR/" "$S3_BUCKET_PATH/build_logs/"
echo "::endgroup::"
- name: Create directory listing on s3
if: always()
shell: bash
run: |
echo "::group::generate-listing"
sudo -E -H -u github python3 .github/scripts/index.py "$S3_BUCKET_PATH"
echo "::endgroup::"
- name: show free space
Expand Down
2 changes: 1 addition & 1 deletion .github/actions/prepare/action.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ runs:
sudo apt-get update
sudo apt-get install -y --no-install-recommends git wget gnupg lsb-release curl xz-utils tzdata cmake \
python3-dev python3-pip ninja-build antlr3 m4 libidn11-dev libaio1 libaio-dev make clang-14 lld-14 llvm-14 file \
distcc strace qemu-kvm dpkg-dev
distcc strace qemu-kvm qemu-utils dpkg-dev atop
sudo pip3 install conan==1.59 pytest==7.1.3 pytest-timeout pytest-xdist==3.3.1 setproctitle==1.3.2 grpcio grpcio-tools PyHamcrest tornado xmltodict pyarrow boto3 moto[server] psutil pygithub==1.59.1 pyinstaller==5.13.2 cryptography packaging six pyyaml
- name: install ccache
shell: bash
Expand Down
35 changes: 30 additions & 5 deletions .github/actions/s3cmd/action.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,12 @@ runs:
shell: bash
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends s3cmd
sudo apt-get install -y --no-install-recommends s3cmd
- name: configure s3cmd
shell: bash
run: |
export S3CMD_CONFIG=$(mktemp)
export S3CMD_CONFIG=$(mktemp -p /home/github)
chown github:github $S3CMD_CONFIG
echo "S3CMD_CONFIG=$S3CMD_CONFIG" >> $GITHUB_ENV
export GITHUB_WORKFLOW_NO_SPACES=${GITHUB_WORKFLOW// /-}
cat <<EOF > $S3CMD_CONFIG
Expand All @@ -45,6 +46,30 @@ runs:
host_bucket = %(bucket)s.storage.ai.nebius.cloud
EOF
mkdir -p /home/github/.aws/
cat <<EOF > /home/github/.aws/credentials
[default]
aws_access_key_id = ${s3_key_id}
aws_secret_access_key = ${s3_secret_access_key}
EOF
cat <<EOF > /home/github/.aws/config
[default]
region = eu-north1
endpoint_url=https://storage.ai.nebius.cloud/
EOF
mkdir -p /root/.aws/
cat <<EOF > /root/.aws/credentials
[default]
aws_access_key_id = ${s3_key_id}
aws_secret_access_key = ${s3_secret_access_key}
EOF
cat <<EOF > /root/.aws/config
[default]
region = eu-north1
endpoint_url=https://storage.ai.nebius.cloud/
EOF
folder="${{ runner.arch == 'X64' && 'x86-64' || runner.arch == 'ARM64' && 'arm64' || 'unknown' }}"
case "${{ inputs.build_preset }}" in
Expand All @@ -65,9 +90,9 @@ runs:
;;
esac
echo "S3_BUCKET_PATH=s3://${{ inputs.s3_bucket }}/${{ github.repository }}/${GITHUB_WORKFLOW_NO_SPACES}/${{ github.run_id }}/${{ inputs.folder_prefix }}${folder}" >> $GITHUB_ENV
echo "S3_URL_PREFIX=${{ inputs.s3_endpoint }}/${{ inputs.s3_bucket }}/${{ github.repository }}/${GITHUB_WORKFLOW_NO_SPACES}/${{ github.run_id }}/${{ inputs.folder_prefix }}${folder}" >> $GITHUB_ENV
echo "S3_WEBSITE_PREFIX=https://${{ inputs.s3_bucket }}.${{ inputs.s3_website_suffix }}/${{ github.repository }}/${GITHUB_WORKFLOW_NO_SPACES}/${{ github.run_id }}/${{ inputs.folder_prefix }}${folder}" >> $GITHUB_ENV
echo "S3_BUCKET_PATH=s3://${{ inputs.s3_bucket }}/${{ github.repository }}/${GITHUB_WORKFLOW_NO_SPACES}/${{ github.run_id }}/${{ github.run_attempt || '1' }}/${{ inputs.folder_prefix }}${folder}" >> $GITHUB_ENV
echo "S3_URL_PREFIX=${{ inputs.s3_endpoint }}/${{ inputs.s3_bucket }}/${{ github.repository }}/${GITHUB_WORKFLOW_NO_SPACES}/${{ github.run_id }}/${{ github.run_attempt || '1' }}/${{ inputs.folder_prefix }}${folder}" >> $GITHUB_ENV
echo "S3_WEBSITE_PREFIX=https://${{ inputs.s3_bucket }}.${{ inputs.s3_website_suffix }}/${{ github.repository }}/${GITHUB_WORKFLOW_NO_SPACES}/${{ github.run_id }}/${{ github.run_attempt || '1' }}/${{ inputs.folder_prefix }}${folder}" >> $GITHUB_ENV
env:
s3_key_id: ${{ inputs.s3_key_id }}
s3_secret_access_key: ${{ inputs.s3_key_secret }}
Expand Down
114 changes: 95 additions & 19 deletions .github/actions/test/action.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,13 @@ inputs:
bazel_remote_uri:
required: false
description: "bazel-remote endpoint"
test_cache_update:
cache_update:
required: false
description: "Use cache for tests"
sync_to_s3:
required: false
default: 'false'
description: 'Sync failed tests folders to s3'
runs:
using: composite
steps:
Expand All @@ -38,23 +42,29 @@ runs:
shell: bash
run: |
echo "SHELLOPTS=xtrace" >> $GITHUB_ENV
export TMP_DIR=$(pwd)/tmp
export TMP_DIR=/home/github/tmp
echo "TMP_DIR=$TMP_DIR" >> $GITHUB_ENV
echo "LOG_DIR=$TMP_DIR/logs" >> $GITHUB_ENV
echo "OUT_DIR=$TMP_DIR/out" >> $GITHUB_ENV
echo "ARTIFACTS_DIR=$TMP_DIR/artifacts" >> $GITHUB_ENV
echo "TESTS_DATA_DIR=$TMP_DIR/test_data" >> $GITHUB_ENV
echo "REPORTS_ARTIFACTS_DIR=$TMP_DIR/artifacts/test_reports" >> $GITHUB_ENV
echo "JUNIT_REPORT_XML=$TMP_DIR/junit.xml" >> $GITHUB_ENV
echo "SUMMARY_LINKS=$(mktemp)" >> $GITHUB_ENV
echo "JUNIT_REPORT_PARTS=$TMP_DIR/junit-split" >> $GITHUB_ENV
echo "SUMMARY_LINKS=$(mktemp -p /home/github)" >> $GITHUB_ENV
- name: prepare
shell: bash
run: |
rm -rf $TMP_DIR $JUNIT_REPORT_XML
mkdir -p $TMP_DIR $OUT_DIR $ARTIFACTS_DIR $LOG_DIR
rm -rf $TMP_DIR $JUNIT_REPORT_XML $JUNIT_REPORT_PARTS $REPORTS_ARTIFACTS_DIR $TESTS_DATA_DIR
mkdir -p $TMP_DIR $OUT_DIR $ARTIFACTS_DIR $LOG_DIR $JUNIT_REPORT_PARTS $REPORTS_ARTIFACTS_DIR $TESTS_DATA_DIR
chown -R github:github $TMP_DIR $OUT_DIR $ARTIFACTS_DIR $LOG_DIR $JUNIT_REPORT_PARTS \
$REPORTS_ARTIFACTS_DIR $SUMMARY_LINKS $GITHUB_WORKSPACE \
$GITHUB_STEP_SUMMARY $TESTS_DATA_DIR
- name: ya test
shell: bash
run: |
set -x
extra_params=()
# FIXME: copy-paste from build_ya
Expand Down Expand Up @@ -94,69 +104,131 @@ runs:
extra_params+=(--bazel-remote-base-uri "${{ inputs.bazel_remote_uri }}")
fi
if [ "${{ inputs.test_cache_update }}" = "true" ]; then
if [ "${{ inputs.cache_update }}" = "true" ]; then
extra_params+=(--cache-tests)
fi
readarray -d ',' -t test_size < <(printf "%s" "${{ inputs.test_size }}")
readarray -d ',' -t test_type < <(printf "%s" "${{ inputs.test_type }}")
./ya test -k --build "${build_type}" -D'BUILD_LANGUAGES=CPP PY3 PY2 GO' \
echo "::group::ya-make-test"
sudo -E -H -u github ./ya test -k --build "${build_type}" -D'BUILD_LANGUAGES=CPP PY3 PY2 GO' \
${test_size[@]/#/--test-size=} ${test_type[@]/#/--test-type=} \
--test-threads "${{ inputs.test_threads }}" --link-threads "${{ inputs.link_threads }}" \
--cache-size 512G --do-not-output-stderrs -T \
--stat --log-file "$LOG_DIR/ya_log.txt" --evlog-file "$LOG_DIR/ya_evlog.jsonl" \
--canonization-backend=ydb-canondata.storage.yandexcloud.net \
--junit "$JUNIT_REPORT_XML" --output "$OUT_DIR" "${extra_params[@]}" || (
RC=$?
if [[ $RC -ge 10 && $RC -le 14 ]]; then
echo "ya test returned failed tests status, recovering.."
else
exit $RC
if [ $RC -ne 0 ]; then
echo "ya test returned $RC, check existence $JUNIT_REPORT_XML"
if [ -s "$JUNIT_REPORT_XML" ]; then
echo "$JUNIT_REPORT_XML exists"
ls -la "$JUNIT_REPORT_XML"
else
echo "$JUNIT_REPORT_XML doesn't exist or has zero size"
ls -la "$JUNIT_REPORT_XML" || true
exit $RC
fi
fi
)
echo "::endgroup::"
- name: archive unittest reports (orig)
shell: bash
run: |
sudo -E -H -u github gzip -c $JUNIT_REPORT_XML > $REPORTS_ARTIFACTS_DIR/orig_junit.xml.gz
- name: postprocess junit report
shell: bash
run: |
.github/scripts/tests/transform-ya-junit.py -i \
sudo -E -H -u github .github/scripts/tests/transform-ya-junit.py -i \
-m .github/config/muted_ya.txt \
--ya-out "$OUT_DIR" \
--log-url-prefix "$S3_WEBSITE_PREFIX/logs/" \
--log-out-dir "$ARTIFACTS_DIR/logs/" \
"$JUNIT_REPORT_XML"
sudo -E -H -u github .github/scripts/tests/split-junit.py -o "$JUNIT_REPORT_PARTS" "$JUNIT_REPORT_XML"
- name: archive unittest reports (transformed)
shell: bash
run: |
sudo -E -H -u github tar -C $JUNIT_REPORT_PARTS/.. -czf $REPORTS_ARTIFACTS_DIR/junit_parts.xml.tar.gz $(basename $JUNIT_REPORT_PARTS) $JUNIT_REPORT_XML
- name: write tests summary
shell: bash
env:
GITHUB_TOKEN: ${{ github.token }}
run: |
mkdir $ARTIFACTS_DIR/summary/
sudo -E -H -u github mkdir $ARTIFACTS_DIR/summary/
cat $SUMMARY_LINKS | python3 -c 'import sys; print(" | ".join([v for _, v in sorted([l.strip().split(" ", 1) for l in sys.stdin], key=lambda a: (int(a[0]), a))]))' >> $GITHUB_STEP_SUMMARY
platform_name=$(uname | tr '[:upper:]' '[:lower:]')-$(arch)
.github/scripts/tests/generate-summary.py \
export SUMMARY_OUT_ENV_PATH=$(mktemp -p /home/github)
chown github:github $SUMMARY_OUT_ENV_PATH
sudo -E -H -u github .github/scripts/tests/generate-summary.py \
--summary-out-path "$ARTIFACTS_DIR/summary/" \
--summary-out-env-path "$SUMMARY_OUT_ENV_PATH" \
--summary-url-prefix "$S3_WEBSITE_PREFIX/summary/" \
--build-preset "${platform_name}-${{ inputs.build_preset }}" \
"Tests" ya-test.html "$JUNIT_REPORT_XML"
cat $SUMMARY_OUT_ENV_PATH | tee -a $GITHUB_STEP_SUMMARY
- name: check test results
shell: bash
run: |
set -x
sudo -E -H -u github .github/scripts/tests/fail-checker.py "$JUNIT_REPORT_XML" || {
RC=$?
echo "::group::Copy-failed-tests-data"
sudo -E -H -u github .github/scripts/tests/fail-checker.py "$JUNIT_REPORT_XML" --paths-only
sudo -E -H -u github .github/scripts/tests/fail-checker.py "$JUNIT_REPORT_XML" --paths-only | while read path; do
echo $path
find "${GITHUB_WORKSPACE}/${path}" -print0 | xargs -0 cp -L -r --parents -t "$TESTS_DATA_DIR"
done
chown -R github:github "$TESTS_DATA_DIR"
echo "::endgroup::"
echo "::group::remove-binaries-from-tests-data-dir"
find "$TESTS_DATA_DIR" -type f -print0 | xargs -0 -n 10 file -i | grep "application/x-executable" | awk -F: '{print $1}'
find "$TESTS_DATA_DIR" -type f -print0 | xargs -0 -n 10 file -i | grep "application/x-executable" | awk -F: '{print $1}' | xargs rm
echo "::endgroup::"
echo "::group::s3-sync"
if [ "$SYNC_TO_S3" = "true" ];
then
sudo -E -H -u github s3cmd sync --follow-symlinks --acl-private --no-progress --stats --no-check-md5 "$TESTS_DATA_DIR/" "$S3_BUCKET_PATH/test_data/"
fi
echo "::endgroup::"
exit $RC
}
env:
SYNC_TO_S3: ${{ inputs.sync_to_s3 || 'false' }}

- name: Sync test results to S3
if: always()
shell: bash
run: |
echo "::group::s3-sync"
s3cmd sync --follow-symlinks --acl-public --no-progress --stats --no-check-md5 "$ARTIFACTS_DIR/" "$S3_BUCKET_PATH/"
sudo -E -H -u github s3cmd sync --follow-symlinks --acl-public --no-progress --stats --no-check-md5 "$ARTIFACTS_DIR/" "$S3_BUCKET_PATH/"
echo "::endgroup::"
- name: Sync logs results to S3
if: always()
shell: bash
run: |
echo "::group::s3-sync"
s3cmd sync --follow-symlinks --acl-private --no-progress --stats --no-check-md5 "$LOG_DIR/" "$S3_BUCKET_PATH/test_logs/"
sudo -E -H -u github s3cmd sync --follow-symlinks --acl-private --no-progress --stats --no-check-md5 "$LOG_DIR/" "$S3_BUCKET_PATH/test_logs/"
echo "::endgroup::"
- name: Sync reports to S3
if: always()
shell: bash
run: |
echo "::group::s3-sync"
sudo -E -H -u github s3cmd sync --follow-symlinks --acl-private --no-progress --stats --no-check-md5 "$REPORTS_ARTIFACTS_DIR/" "$S3_BUCKET_PATH/test_reports/"
echo "::endgroup::"
- name: Display links to s3 summary
Expand All @@ -167,10 +239,14 @@ runs:
echo ${S3_URL_PREFIX}/summary/ya-test.html
echo ${S3_WEBSITE_PREFIX}/summary/ya-test.html
echo "::endgroup::"
- name: check test results
- name: Create directory listing on s3
if: always()
shell: bash
run: |
.github/scripts/tests/fail-checker.py "$JUNIT_REPORT_XML"
echo "::group::generate-listing"
sudo -E -H -u github python3 .github/scripts/index.py "$S3_BUCKET_PATH"
echo "::endgroup::"
- name: show free space
if: always()
Expand Down
Loading

0 comments on commit 1a37a18

Please sign in to comment.