diff --git a/.circleci/template.yml b/.circleci/template.yml
index 5d0ce1e37a..d5ed39cb68 100644
--- a/.circleci/template.yml
+++ b/.circleci/template.yml
@@ -310,7 +310,7 @@ commands:
           name: Upload results
           when: always
           command: |
-            tools/circleci-prepare-log-dir.sh
+            tools/prepare-log-dir.sh
             if [ -n "${AWS_SECRET_ACCESS_KEY}" ]; then tools/circleci-upload-to-s3.sh; fi
   report_failed_test_cases_to_ga4:
     steps:
diff --git a/tools/circle-publish-github-comment.sh b/tools/circle-publish-github-comment.sh
index 6a1cf34159..6bb0ef19da 100755
--- a/tools/circle-publish-github-comment.sh
+++ b/tools/circle-publish-github-comment.sh
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash

-source tools/circleci-helpers.sh
+source tools/helpers.sh

 set -e

@@ -72,7 +72,7 @@ function rewrite_log_links_to_s3
     local CT_REPORT=big_tests/ct_report
     local CT_REPORT_ABS=$(./tools/abs_dirpath.sh "$CT_REPORT")
     local CT_REPORTS=$(ct_reports_dir)
-    local BIG_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/big"
+    local BIG_TESTS_URL="$(archive_reader_url big ${CT_REPORTS})"
     cp /tmp/ct_markdown /tmp/ct_markdown_original
     replace_string "$CT_REPORT_ABS" "$BIG_TESTS_URL" /tmp/ct_markdown
     # URL escape for s3_reports.html script
@@ -97,7 +97,7 @@ function small_suite_path
 function ct_run_url
 {
     local CT_REPORTS=$(ct_reports_dir)
-    local BIG_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/big"
+    local BIG_TESTS_URL="$(archive_reader_url big ${CT_REPORTS})"
     local RUN_PART=$(echo "$(last_ct_run_name)" | sed "s/@/%40/g")
     echo "$BIG_TESTS_URL/$RUN_PART/index.html"
 }
@@ -105,7 +105,7 @@ function ct_run_url
 function ct_small_url
 {
     local CT_REPORTS=$(ct_reports_dir)
-    local SMALL_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/small"
+    local SMALL_TESTS_URL="$(archive_reader_url small ${CT_REPORTS})"
     local SUFFIX=$(small_suite_path)
     echo "$SMALL_TESTS_URL/$SUFFIX"
 }
diff --git a/tools/circleci-helpers.sh b/tools/circleci-helpers.sh
deleted file mode 100644
index ce84bea2aa..0000000000
--- a/tools/circleci-helpers.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-ct_reports_dir() {
-    local BUILD_NO=${CIRCLE_BUILD_NUM:-ct_reports}
-    local PRESET_NAME=${PRESET:-default}
-    OTP_VERSION=`cat otp_version`
-    local ERLANG=${OTP_VERSION:-default}
-    local CT_REPORTS="${BUILD_NO}/${PRESET_NAME}.${ERLANG}"
-    local BRANCH=${CIRCLE_BRANCH:-master}
-    PR_NUM=`basename $CI_PULL_REQUEST`
-    local PR=${PR_NUM:-false}
-
-
-    if [ ${PR} == false ]; then
-        echo "branch/${BRANCH}/${CT_REPORTS}"
-    else
-        echo "PR/${PR}/${CT_REPORTS}"
-    fi
-
-}
-
-# Works for directories only
-# Allows to list directories content
-
-s3_url() {
-    local CT_REPORTS=${1:-}
-    echo "http://esl.github.io/circleci-mim-results/s3_reports.html?prefix=${CT_REPORTS}"
-}
-
-direct_s3_url() {
-    local CT_REPORTS=${1:-}
-    echo "https://circleci-mim-results.s3.eu-central-1.amazonaws.com/${CT_REPORTS}"
-}
-
diff --git a/tools/circleci-prepare-log-dir.sh b/tools/circleci-prepare-log-dir.sh
deleted file mode 100755
index a42a8dec1a..0000000000
--- a/tools/circleci-prepare-log-dir.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-
-source tools/circleci-helpers.sh
-
-set -euo pipefail
-IFS=$'\n\t'
-
-# Relative directory name
-CT_REPORTS=$(ct_reports_dir)
-mkdir -p ${CT_REPORTS}/small
-mkdir -p ${CT_REPORTS}/big
-
-if [ -d _build/test/logs ]; then
-    cp -Rp _build/test/logs/* ${CT_REPORTS}/small
-fi
-
-CT_REPORT=big_tests/ct_report
-
-if [ -d ${CT_REPORT} ] && [ "$(ls -A ${CT_REPORT})" ]; then
-    cp -Rp ${CT_REPORT}/* ${CT_REPORTS}/big
-fi
-
-cat > ${CT_REPORTS}/index.html << EOL
-<html>
-<head></head>
-<body>
-<p><a href="small">Small tests (test/)</a></p>
-<p><a href="big">Big tests (big_tests/)</a></p>
-</body>
-</html>
-EOL
-
-CT_REPORTS_FULL=$(cd "$CT_REPORTS" && pwd)
-
-now=`date +'%Y-%m-%d_%H.%M.%S'`
-# Replace all occurrences of / with _
-PREFIX="${CT_REPORTS//\//_}"
-
-# Optimize naming, so it is easy to extract on MacOS just by clicking it
-# and with reasonable directory names
-LOG_DIR_ROOT=${CT_REPORTS}/logs/${PREFIX}_${now}
-LOG_ZIP=${CT_REPORTS_FULL}/logs_${PREFIX}_${now}.tar.gz
-for dev_node_logs_path in `find _build -name log -type d`; do
-    dev_node=$(basename $(dirname $(dirname $(dirname ${dev_node_logs_path}))))
-    LOG_DIR=${LOG_DIR_ROOT}/${dev_node}/
-    mkdir -p ${LOG_DIR}
-    cp ${dev_node_logs_path}/* ${LOG_DIR}
-done
-
-cp *.log ${LOG_DIR_ROOT}
-cp big_tests/*.log ${LOG_DIR_ROOT} || true
-
-OLD_DIR=$(pwd)
-
-# cd so we don't include nested dirs in the archive (for example, PR/4366/236412)
-cd "$LOG_DIR_ROOT/.."
-
-# Zip to safe space
-tar -czvf "$LOG_ZIP" "$(basename "$LOG_DIR_ROOT")"
-
-cd "$OLD_DIR"
-
-# Slightly faster than removing
-mv "$LOG_DIR_ROOT" /tmp/
diff --git a/tools/circleci-upload-to-s3.sh b/tools/circleci-upload-to-s3.sh
index db951a9d67..67482f14ae 100755
--- a/tools/circleci-upload-to-s3.sh
+++ b/tools/circleci-upload-to-s3.sh
@@ -1,6 +1,6 @@
 #!/bin/bash

-source tools/circleci-helpers.sh
+source tools/helpers.sh

 set -euo pipefail

diff --git a/tools/gh-upload-to-s3.sh b/tools/gh-upload-to-s3.sh
index 40bea78676..e1dc3ac6c0 100755
--- a/tools/gh-upload-to-s3.sh
+++ b/tools/gh-upload-to-s3.sh
@@ -29,4 +29,6 @@ prefix="GH/${GITHUB_RUN_ID}/${GITHUB_RUN_ATTEMPT}/${PRESET}.${RANDOM}/${dest_dir
 echo "directory '${dir}' is uploaded here:"
 echo "  https://esl.github.io/circleci-mim-results/s3_reports.html?prefix=${prefix}"

+# TODO: add links for tar.gz viewers
+
 time aws s3 cp "$dir" s3://circleci-mim-results/"${prefix}" --acl public-read --recursive --quiet
diff --git a/tools/helpers.sh b/tools/helpers.sh
index 1c00dcbda4..eb116a73bb 100644
--- a/tools/helpers.sh
+++ b/tools/helpers.sh
@@ -1,4 +1,31 @@
 ct_reports_dir() {
+    if [[ "$CIRCLECI" == true ]]; then
+        ct_reports_dir_circleci
+    else
+        ct_reports_dir_github
+    fi
+}
+
+ct_reports_dir_circleci() {
+    local BUILD_NO=${CIRCLE_BUILD_NUM:-ct_reports}
+    local PRESET_NAME=${PRESET:-default}
+    OTP_VERSION=`cat otp_version`
+    local ERLANG=${OTP_VERSION:-default}
+    local CT_REPORTS="${BUILD_NO}/${PRESET_NAME}.${ERLANG}"
+    local BRANCH=${CIRCLE_BRANCH:-master}
+    PR_NUM=`basename $CI_PULL_REQUEST`
+    local PR=${PR_NUM:-false}
+
+
+    if [ ${PR} == false ]; then
+        echo "branch/${BRANCH}/${CT_REPORTS}"
+    else
+        echo "PR/${PR}/${CT_REPORTS}"
+    fi
+}
+
+ct_reports_dir_github() {
+    # Note, that tools/gh-upload-to-s3.sh uploads to a different random prefix
     local BUILD_NO=${GITHUB_RUN_NUMBER:-ct_reports}
     local PRESET_NAME=${PRESET:-default}
     # @TODO CI:
@@ -13,21 +40,27 @@ ct_reports_dir() {
     local PR=${CI_PULL_REQUEST:-false}

     if [ ${PR} == false ]; then
-        echo "branch/${BRANCH}/${CT_REPORTS}"
+        echo "branch/${BRANCH}/${CT_REPORTS}"
     else
-        echo "PR/${PR}/${CT_REPORTS}"
+        echo "PR/${PR}/${CT_REPORTS}"
     fi
-
 }

 # Works for directories only
 # Allows to list directories content
+
 s3_url() {
     local CT_REPORTS=${1:-}
-    echo "http://esl.github.io/mongooseim-ct-reports/s3_reports.html?prefix=${CT_REPORTS}"
+    echo "http://esl.github.io/circleci-mim-results/s3_reports.html?prefix=${CT_REPORTS}"
 }

 direct_s3_url() {
     local CT_REPORTS=${1:-}
-    echo "https://mongooseim-ct-results.s3-eu-west-1.amazonaws.com/${CT_REPORTS}"
+    echo "https://circleci-mim-results.s3.eu-central-1.amazonaws.com/${CT_REPORTS}"
+}
+
+archive_reader_url() {
+    local TEST_TYPE=$1
+    local CT_REPORTS=${2:-}
+    echo "https://esl.github.io/html-zip-reader/${CT_REPORTS}/${TEST_TYPE}.tar.gz/"
 }
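
Note: with the consolidated helpers above, the CI report prefix and the new
html-zip-reader links compose as follows. This is an illustrative sketch only:
the build number, preset, PR number and otp_version contents are made-up
values, while ct_reports_dir and archive_reader_url are the real helpers from
this patch.

    source tools/helpers.sh
    # On CircleCI (CIRCLECI=true), for a hypothetical PR build with
    # CIRCLE_BUILD_NUM=12345, PRESET=internal_mnesia,
    # CI_PULL_REQUEST=https://github.com/esl/MongooseIM/pull/4000
    # and an otp_version file containing "26.1":
    ct_reports_dir
    # -> PR/4000/12345/internal_mnesia.26.1
    archive_reader_url big "PR/4000/12345/internal_mnesia.26.1"
    # -> https://esl.github.io/html-zip-reader/PR/4000/12345/internal_mnesia.26.1/big.tar.gz/

The comment scripts append paths from inside the archive (for example
<ct_run>/index.html) to that base URL, so the tar.gz can apparently be browsed
without unpacking it.
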
diff --git a/tools/prepare-log-dir.sh b/tools/prepare-log-dir.sh
index b28cd578aa..03f604e874 100755
--- a/tools/prepare-log-dir.sh
+++ b/tools/prepare-log-dir.sh
@@ -2,41 +2,64 @@

 source tools/helpers.sh

+REPO_DIR=$(pwd)
+
 set -euo pipefail
 IFS=$'\n\t'

+# Relative directory name
 CT_REPORTS=$(ct_reports_dir)
-mkdir -p ${CT_REPORTS}/small
-mkdir -p ${CT_REPORTS}/big
-
-if [ -d _build/test/logs ]; then
-    cp -Rp _build/test/logs/* ${CT_REPORTS}/small
-fi
-
-CT_REPORT=big_tests/ct_report
-
-if [ -d ${CT_REPORT} ] && [ "$(ls -A ${CT_REPORT})" ]; then
-    cp -Rp ${CT_REPORT}/* ${CT_REPORTS}/big
-fi
-
-cat > ${CT_REPORTS}/index.html << EOL
-<html>
-<head></head>
-<body>
-<p><a href="small">Small tests (test/)</a></p>
-<p><a href="big">Big tests (big_tests/)</a></p>
-</body>
-</html>
-EOL
+mkdir -p "$CT_REPORTS"
+CT_REPORTS_FULL=$(cd "$CT_REPORTS" && pwd)

 now=`date +'%Y-%m-%d_%H.%M.%S'`
-LOG_DIR_ROOT=${CT_REPORTS}/logs/${now}
+# Replace all occurrences of / with _
+PREFIX="${CT_REPORTS//\//_}"
+
+# Optimize naming, so it is easy to extract on MacOS just by clicking it
+# and with reasonable directory names
+LOG_DIR_ROOT=${CT_REPORTS}/logs/${PREFIX}_${now}
+LOG_ZIP=${CT_REPORTS_FULL}/logs_${PREFIX}_${now}.tar.gz
 for dev_node_logs_path in `find _build -name log -type d`; do
     dev_node=$(basename $(dirname $(dirname $(dirname ${dev_node_logs_path}))))
-    LOG_DIR=${LOG_DIR_ROOT}/${dev_node}/log
+    LOG_DIR=${LOG_DIR_ROOT}/${dev_node}/
     mkdir -p ${LOG_DIR}
-    cp ${dev_node_logs_path}/* ${LOG_DIR}
+    mv ${dev_node_logs_path}/* ${LOG_DIR}
 done

-cp *.log ${LOG_DIR_ROOT}
-cp big_tests/*.log ${LOG_DIR_ROOT}
+mv *.log ${LOG_DIR_ROOT}
+mv big_tests/*.log ${LOG_DIR_ROOT} || true
+
+# cd so we don't include nested dirs in the archive (for example, PR/4366/236412)
+cd "$LOG_DIR_ROOT/.."
+
+# Zip to safe space
+tar -czf "$LOG_ZIP" "$(basename "$LOG_DIR_ROOT")"
+
+cd "$REPO_DIR"
+
+# Slightly faster than removing
+mv "$LOG_DIR_ROOT" /tmp/
+
+# Compress big ct_reports
+BIG_REPORTS_DIR="$(pwd)/big_tests/ct_report"
+SMALL_REPORTS_DIR="$(pwd)/_build/test/logs"
+
+if [ -f ${BIG_REPORTS_DIR}/index.html ]; then
+    cd ${BIG_REPORTS_DIR}
+    # Ignore GDPR extracted logs
+    # They are primarily empty files
+    tar \
+        --exclude='./ct_run*/*.logs/last_link.html' \
+        --exclude='./ct_run*/*.logs/last_name' \
+        --exclude='./ct_run*/*.unzipped' \
+        -czf "${CT_REPORTS_FULL}/big.tar.gz" .
+fi
+
+if [ -f ${SMALL_REPORTS_DIR}/index.html ]; then
+    cd ${SMALL_REPORTS_DIR}
+    tar \
+        --exclude='./ct_run*/*.logs/last_link.html' \
+        --exclude='./ct_run*/*.logs/last_name' \
+        -czf "${CT_REPORTS_FULL}/small.tar.gz" .
+fi
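
Note: after this rewrite the upload directory holds a few flat archives
instead of thousands of individual report files; with the hypothetical prefix
from the note above it would contain big.tar.gz, small.tar.gz and
logs_PR_4000_12345_internal_mnesia.26.1_<timestamp>.tar.gz. A quick local
sanity check of such an archive might look like this (paths assumed):

    tar -tzf big.tar.gz | head            # list the archived report files
    mkdir -p /tmp/big && tar -xzf big.tar.gz -C /tmp/big
    xdg-open /tmp/big/index.html          # on macOS: open
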
diff --git a/tools/publish-github-comment.sh b/tools/publish-github-comment.sh
index 5a76d5cf37..8b190dd2ab 100755
--- a/tools/publish-github-comment.sh
+++ b/tools/publish-github-comment.sh
@@ -1,5 +1,8 @@
 #!/usr/bin/env bash

+# TODO: publish comment with errors to a separate dedicated issue on GitHub,
+# if Github Actions fails
+
 source tools/helpers.sh

 set -e
@@ -67,7 +70,7 @@ function rewrite_log_links_to_s3
     local CT_REPORT=big_tests/ct_report
     local CT_REPORT_ABS=$(./tools/abs_dirpath.sh "$CT_REPORT")
     local CT_REPORTS=$(ct_reports_dir)
-    local BIG_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/big"
+    local BIG_TESTS_URL="$(archive_reader_url big ${CT_REPORTS})"
     cp /tmp/ct_markdown /tmp/ct_markdown_original
     replace_string "$CT_REPORT_ABS" "$BIG_TESTS_URL" /tmp/ct_markdown
     # URL escape for s3_reports.html script
@@ -92,7 +95,7 @@ function small_suite_path
 function ct_run_url
 {
     local CT_REPORTS=$(ct_reports_dir)
-    local BIG_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/big"
+    local BIG_TESTS_URL="$(archive_reader_url big ${CT_REPORTS})"
     local RUN_PART=$(echo "$(last_ct_run_name)" | sed "s/@/%40/g")
     echo "$BIG_TESTS_URL/$RUN_PART/index.html"
 }
@@ -100,7 +103,7 @@ function ct_run_url
 function ct_small_url
 {
     local CT_REPORTS=$(ct_reports_dir)
-    local SMALL_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/small"
+    local SMALL_TESTS_URL="$(archive_reader_url small ${CT_REPORTS})"
     local SUFFIX=$(small_suite_path)
     echo "$SMALL_TESTS_URL/$SUFFIX"
 }
diff --git a/tools/test.sh b/tools/test.sh
index 1d0cc32790..44eb405618 100755
--- a/tools/test.sh
+++ b/tools/test.sh
@@ -46,12 +46,7 @@ while getopts ":p:s:e:c:h:" opt; do
 done

 source tools/common-vars.sh
-
-if [ ${CIRCLECI} ]; then
-source tools/circleci-helpers.sh
-else
 source tools/helpers.sh
-fi

 if [ "${AWS_SECRET_ACCESS_KEY}" ]; then
     CT_REPORTS=$(ct_reports_dir)
diff --git a/tools/upload-to-s3.sh b/tools/upload-to-s3.sh
deleted file mode 100755
index ff36ebc439..0000000000
--- a/tools/upload-to-s3.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-source tools/helpers.sh
-
-set -euo pipefail
-
-CT_REPORTS=$(ct_reports_dir)
-
-if [ ! -d "${CT_REPORTS}" ]; then
-    echo "Skip uploading, because $CT_REPORTS directory does not exist"
-    exit 0
-fi
-
-echo "Uploading test results to s3"
-echo $(s3_url ${CT_REPORTS})
-
-echo "Installing s3-parallel-put based on boto"
-sudo time pip install boto python-magic
-# The fork of s3-parallel-put has some small optimizations.
-S3PP_COMMIT=6fd430a54e976d2d580042efdf82ac2fb66d5e57
-wget -O tools/s3-parallel-put https://raw.githubusercontent.com/arcusfelis/s3-parallel-put/$S3PP_COMMIT/s3-parallel-put
-chmod +x tools/s3-parallel-put
-
-
-# XXX please, reduce number of files
-FILE_COUNT=$(find "${CT_REPORTS}" -type f | wc -l)
-echo "Uploading $FILE_COUNT files"
-
-AWS_BUCKET="${AWS_BUCKET:-mongooseim-ct-results}"
-# We don't expect write conflicts, so we use put=stupid to reduce operations.
-#
-# Docs for the tool
-# https://github.com/mishudark/s3-parallel-put
-#
-# 64 processes works better for our case, than 32 and 8 (default).
-time tools/s3-parallel-put --quiet --processes=64 --put=stupid \
-    --bucket_region=$AWS_DEFAULT_REGION --bucket=$AWS_BUCKET --prefix=${CT_REPORTS} ${CT_REPORTS}
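
Note: the deleted tools/upload-to-s3.sh existed to parallelize uploading very
many small files ("XXX please, reduce number of files"). Once the reports ship
as single tar.gz archives, the plain recursive copy already used by
tools/gh-upload-to-s3.sh should be sufficient (sketch, prefix as before):

    aws s3 cp "$CT_REPORTS" "s3://circleci-mim-results/${CT_REPORTS}" \
        --acl public-read --recursive --quiet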