Skip to content

Commit

Permalink
Merge pull request #4367 from esl/compress-logs-on-s3-ct
Browse files Browse the repository at this point in the history
Optimize (compress) ct_report logs on CI and read them using a service worker in Browser

This PR addresses MIM-2283 "Optimize (compress) ct_report logs somehow on CI"

Proposed changes include:
    Compress files into tar.gz.
    We use https://github.com/esl/html-zip-reader to read the archive and serve it using a service worker.
    The logic for the service worker could be reviewed in this PR: esl/html-zip-reader#1
    We could probably stop using s3-parallel-put (unless it turns out to be smaller than the awscli tools), but that is left for a separate PR :)
  • Loading branch information
NelsonVides authored Sep 6, 2024
2 parents 0ee4c89 + 3fea460 commit 1680efa
Show file tree
Hide file tree
Showing 11 changed files with 102 additions and 179 deletions.
2 changes: 1 addition & 1 deletion .circleci/template.yml
Original file line number Diff line number Diff line change
Expand Up @@ -310,7 +310,7 @@ commands:
name: Upload results
when: always
command: |
tools/circleci-prepare-log-dir.sh
tools/prepare-log-dir.sh
if [ -n "${AWS_SECRET_ACCESS_KEY}" ]; then tools/circleci-upload-to-s3.sh; fi
report_failed_test_cases_to_ga4:
steps:
Expand Down
8 changes: 4 additions & 4 deletions tools/circle-publish-github-comment.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/usr/bin/env bash

source tools/circleci-helpers.sh
source tools/helpers.sh

set -e

Expand Down Expand Up @@ -72,7 +72,7 @@ function rewrite_log_links_to_s3
local CT_REPORT=big_tests/ct_report
local CT_REPORT_ABS=$(./tools/abs_dirpath.sh "$CT_REPORT")
local CT_REPORTS=$(ct_reports_dir)
local BIG_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/big"
local BIG_TESTS_URL="$(archive_reader_url big ${CT_REPORTS})"
cp /tmp/ct_markdown /tmp/ct_markdown_original
replace_string "$CT_REPORT_ABS" "$BIG_TESTS_URL" /tmp/ct_markdown
# URL escape for s3_reports.html script
Expand All @@ -97,15 +97,15 @@ function small_suite_path
# Print the full URL of the big-tests CT run index page, served through
# the html-zip-reader archive viewer.
# Uses: ct_reports_dir, archive_reader_url, last_ct_run_name (sourced helpers).
# Outputs: the URL on stdout.
function ct_run_url
{
    local CT_REPORTS BIG_TESTS_URL RUN_PART
    CT_REPORTS=$(ct_reports_dir)
    # Note: the old direct_s3_url-based assignment was a stale leftover;
    # links now go through the archive reader.
    BIG_TESTS_URL=$(archive_reader_url big "${CT_REPORTS}")
    # URL-escape '@' in the run name for the s3_reports.html script
    RUN_PART=$(last_ct_run_name | sed "s/@/%40/g")
    echo "${BIG_TESTS_URL}/${RUN_PART}/index.html"
}

# Print the full URL of a small-tests suite page, served through
# the html-zip-reader archive viewer.
# Uses: ct_reports_dir, archive_reader_url, small_suite_path (sourced helpers).
# Outputs: the URL on stdout.
function ct_small_url
{
    local CT_REPORTS SMALL_TESTS_URL SUFFIX
    CT_REPORTS=$(ct_reports_dir)
    # Note: the old direct_s3_url-based assignment was a stale leftover;
    # links now go through the archive reader.
    SMALL_TESTS_URL=$(archive_reader_url small "${CT_REPORTS}")
    SUFFIX=$(small_suite_path)
    echo "${SMALL_TESTS_URL}/${SUFFIX}"
}
Expand Down
32 changes: 0 additions & 32 deletions tools/circleci-helpers.sh

This file was deleted.

64 changes: 0 additions & 64 deletions tools/circleci-prepare-log-dir.sh

This file was deleted.

2 changes: 1 addition & 1 deletion tools/circleci-upload-to-s3.sh
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#!/bin/bash

source tools/circleci-helpers.sh
source tools/helpers.sh

set -euo pipefail

Expand Down
2 changes: 2 additions & 0 deletions tools/gh-upload-to-s3.sh
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,6 @@ prefix="GH/${GITHUB_RUN_ID}/${GITHUB_RUN_ATTEMPT}/${PRESET}.${RANDOM}/${dest_dir
echo "directory '${dir}' is uploaded here:"
echo " https://esl.github.io/circleci-mim-results/s3_reports.html?prefix=${prefix}"

# TODO: add links for tar.gz viewers

time aws s3 cp "$dir" s3://circleci-mim-results/"${prefix}" --acl public-read --recursive --quiet
43 changes: 38 additions & 5 deletions tools/helpers.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,31 @@
# Print the relative directory (S3 prefix) for CT reports, dispatching to
# the CircleCI or GitHub Actions variant based on the CIRCLECI env var.
ct_reports_dir() {
    case "$CIRCLECI" in
        true) ct_reports_dir_circleci ;;
        *)    ct_reports_dir_github ;;
    esac
}

# Print the S3 prefix for CT reports on CircleCI:
#   branch/<branch>/<build>/<preset>.<otp>   when not a PR build
#   PR/<pr-number>/<build>/<preset>.<otp>    when CI_PULL_REQUEST is set
# Reads the OTP version from the ./otp_version file.
# NOTE(review): OTP_VERSION and PR_NUM are intentionally left non-local,
# matching the original behavior in case a caller relies on them.
ct_reports_dir_circleci() {
    local BUILD_NO=${CIRCLE_BUILD_NUM:-ct_reports}
    local PRESET_NAME=${PRESET:-default}
    OTP_VERSION=$(cat otp_version)
    local ERLANG=${OTP_VERSION:-default}
    local CT_REPORTS="${BUILD_NO}/${PRESET_NAME}.${ERLANG}"
    local BRANCH=${CIRCLE_BRANCH:-master}
    # Quote and default CI_PULL_REQUEST so the function works under
    # 'set -u' (the sourcing scripts use 'set -euo pipefail')
    PR_NUM=$(basename "${CI_PULL_REQUEST:-}")
    local PR=${PR_NUM:-false}

    if [[ "${PR}" == false ]]; then
        echo "branch/${BRANCH}/${CT_REPORTS}"
    else
        echo "PR/${PR}/${CT_REPORTS}"
    fi
}

ct_reports_dir_github() {
# Note, that tools/gh-upload-to-s3.sh uploads to a different random prefix
local BUILD_NO=${GITHUB_RUN_NUMBER:-ct_reports}
local PRESET_NAME=${PRESET:-default}
# @TODO CI:
Expand All @@ -13,21 +40,27 @@ ct_reports_dir() {
local PR=${CI_PULL_REQUEST:-false}

if [ ${PR} == false ]; then
echo "branch/${BRANCH}/${CT_REPORTS}"
echo "branch/${BRANCH}/${CT_REPORTS}"
else
echo "PR/${PR}/${CT_REPORTS}"
echo "PR/${PR}/${CT_REPORTS}"
fi

}

# Works for directories only
# Allows listing the contents of a directory

# Print the browsable directory-listing URL for the given S3 prefix,
# via the s3_reports.html helper page.
# $1 - S3 prefix of the CT reports directory (optional)
s3_url() {
    local CT_REPORTS=${1:-}
    # The old mongooseim-ct-reports echo was a stale leftover and made the
    # function print two lines; results live in circleci-mim-results now.
    echo "http://esl.github.io/circleci-mim-results/s3_reports.html?prefix=${CT_REPORTS}"
}

# Print the direct (non-listing) S3 URL for the given prefix.
# $1 - S3 prefix of the CT reports directory (optional)
direct_s3_url() {
    local CT_REPORTS=${1:-}
    # The old mongooseim-ct-results echo was a stale leftover and made the
    # function print two lines; the bucket is circleci-mim-results now.
    echo "https://circleci-mim-results.s3.eu-central-1.amazonaws.com/${CT_REPORTS}"
}

# Build a link that serves a tar.gz archive through the html-zip-reader
# service worker, so compressed CT reports can be browsed in place.
# $1 - test type ("big" or "small"); names the <type>.tar.gz archive
# $2 - S3 prefix of the CT reports directory (optional)
archive_reader_url() {
    local kind=$1
    local prefix=${2:-}
    printf '%s\n' "https://esl.github.io/html-zip-reader/${prefix}/${kind}.tar.gz/"
}
77 changes: 50 additions & 27 deletions tools/prepare-log-dir.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,41 +2,64 @@

source tools/helpers.sh

REPO_DIR=$(pwd)

set -euo pipefail
IFS=$'\n\t'

# Relative directory name
CT_REPORTS=$(ct_reports_dir)
mkdir -p ${CT_REPORTS}/small
mkdir -p ${CT_REPORTS}/big

if [ -d _build/test/logs ]; then
cp -Rp _build/test/logs/* ${CT_REPORTS}/small
fi

CT_REPORT=big_tests/ct_report

if [ -d ${CT_REPORT} ] && [ "$(ls -A ${CT_REPORT})" ]; then
cp -Rp ${CT_REPORT}/* ${CT_REPORTS}/big
fi

cat > ${CT_REPORTS}/index.html << EOL
<html>
<head></head>
<body>
<p><a href="small/index.html">Small tests (test/)</a></p>
<p><a href="big/index.html">Big tests (big_tests/)</a></p>
</body>
</html>
EOL
mkdir -p "$CT_REPORTS"
CT_REPORTS_FULL=$(cd "$CT_REPORTS" && pwd)

now=`date +'%Y-%m-%d_%H.%M.%S'`
LOG_DIR_ROOT=${CT_REPORTS}/logs/${now}
# Replace all occurrences of / with _
PREFIX="${CT_REPORTS//\//_}"

# Optimize the naming, so the archive is easy to extract on macOS just by
# clicking it, and so the extracted directories have reasonable names
LOG_DIR_ROOT=${CT_REPORTS}/logs/${PREFIX}_${now}
LOG_ZIP=${CT_REPORTS_FULL}/logs_${PREFIX}_${now}.tar.gz
for dev_node_logs_path in `find _build -name log -type d`; do
dev_node=$(basename $(dirname $(dirname $(dirname ${dev_node_logs_path}))))
LOG_DIR=${LOG_DIR_ROOT}/${dev_node}/log
LOG_DIR=${LOG_DIR_ROOT}/${dev_node}/
mkdir -p ${LOG_DIR}
cp ${dev_node_logs_path}/* ${LOG_DIR}
mv ${dev_node_logs_path}/* ${LOG_DIR}
done

cp *.log ${LOG_DIR_ROOT}
cp big_tests/*.log ${LOG_DIR_ROOT}
mv *.log ${LOG_DIR_ROOT}
mv big_tests/*.log ${LOG_DIR_ROOT} || true

# cd so we don't include nested dirs in the archive (for example, PR/4366/236412)
cd "$LOG_DIR_ROOT/.."

# Zip to safe space
tar -czf "$LOG_ZIP" "$(basename "$LOG_DIR_ROOT")"

cd "$REPO_DIR"

# Slightly faster than removing
mv "$LOG_DIR_ROOT" /tmp/

# Compress big ct_reports
BIG_REPORTS_DIR="$(pwd)/big_tests/ct_report"
SMALL_REPORTS_DIR="$(pwd)/_build/test/logs"

if [ -f ${BIG_REPORTS_DIR}/index.html ]; then
cd ${BIG_REPORTS_DIR}
# Ignore GDPR extracted logs
# They are primarily empty files
tar \
--exclude='./ct_run*/*.logs/last_link.html' \
--exclude='./ct_run*/*.logs/last_name' \
--exclude='./ct_run*/*.unzipped' \
-czf "${CT_REPORTS_FULL}/big.tar.gz" .
fi

if [ -f ${SMALL_REPORTS_DIR}/index.html ]; then
cd ${SMALL_REPORTS_DIR}
tar \
--exclude='./ct_run*/*.logs/last_link.html' \
--exclude='./ct_run*/*.logs/last_name' \
-czf "${CT_REPORTS_FULL}/small.tar.gz" .
fi
9 changes: 6 additions & 3 deletions tools/publish-github-comment.sh
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
#!/usr/bin/env bash

# TODO: publish comment with errors to a separate dedicated issue on GitHub,
# if Github Actions fails

source tools/helpers.sh

set -e
Expand Down Expand Up @@ -67,7 +70,7 @@ function rewrite_log_links_to_s3
local CT_REPORT=big_tests/ct_report
local CT_REPORT_ABS=$(./tools/abs_dirpath.sh "$CT_REPORT")
local CT_REPORTS=$(ct_reports_dir)
local BIG_TESTS_URL="$(direct_s3_url ${CT_REPORTS})/big"
local BIG_TESTS_URL="$(archive_reader_url big ${CT_REPORTS})"
cp /tmp/ct_markdown /tmp/ct_markdown_original
replace_string "$CT_REPORT_ABS" "$BIG_TESTS_URL" /tmp/ct_markdown
# URL escape for s3_reports.html script
Expand All @@ -92,15 +95,15 @@ function small_suite_path
# Print the full URL of the big-tests CT run index page, served through
# the html-zip-reader archive viewer.
# Uses: ct_reports_dir, archive_reader_url, last_ct_run_name (sourced helpers).
# Outputs: the URL on stdout.
function ct_run_url
{
    local CT_REPORTS BIG_TESTS_URL RUN_PART
    CT_REPORTS=$(ct_reports_dir)
    # Note: the old direct_s3_url-based assignment was a stale leftover;
    # links now go through the archive reader.
    BIG_TESTS_URL=$(archive_reader_url big "${CT_REPORTS}")
    # URL-escape '@' in the run name for the s3_reports.html script
    RUN_PART=$(last_ct_run_name | sed "s/@/%40/g")
    echo "${BIG_TESTS_URL}/${RUN_PART}/index.html"
}

# Print the full URL of a small-tests suite page, served through
# the html-zip-reader archive viewer.
# Uses: ct_reports_dir, archive_reader_url, small_suite_path (sourced helpers).
# Outputs: the URL on stdout.
function ct_small_url
{
    local CT_REPORTS SMALL_TESTS_URL SUFFIX
    CT_REPORTS=$(ct_reports_dir)
    # Note: the old direct_s3_url-based assignment was a stale leftover;
    # links now go through the archive reader.
    SMALL_TESTS_URL=$(archive_reader_url small "${CT_REPORTS}")
    SUFFIX=$(small_suite_path)
    echo "${SMALL_TESTS_URL}/${SUFFIX}"
}
Expand Down
5 changes: 0 additions & 5 deletions tools/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -46,12 +46,7 @@ while getopts ":p:s:e:c:h:" opt; do
done

source tools/common-vars.sh

if [ ${CIRCLECI} ]; then
source tools/circleci-helpers.sh
else
source tools/helpers.sh
fi

if [ "${AWS_SECRET_ACCESS_KEY}" ]; then
CT_REPORTS=$(ct_reports_dir)
Expand Down
Loading

0 comments on commit 1680efa

Please sign in to comment.