WIP Test bundle install #4
Workflow file for this run
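# Installs a freshly built HPCC Platform package, installs the ECL machine-learning
# bundles, and runs each bundle's regression suite on a local Thor cluster.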
name: Thor ML Test
on:
  #workflow_call:
  workflow_dispatch:
    inputs:
      os:
        type: string
        description: 'Operating System'
        required: false
        default: 'ubuntu-22.04'
      asset-name:
        type: string
        description: 'Asset Name'
        required: false
        default: 'build-docker-package'
      dependencies:
        type: string
        description: 'Dependencies'
        required: false
        default: 'bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake xmlstarlet'
  push:
  pull_request:
    branches:
      - "master"
      - "candidate-*"
      - "!candidate-7.10.*"
      - "!candidate-7.8.*"
      - "!candidate-7.6.*"
      - "!candidate-7.4.*"
      - "!candidate-7.2.*"
      - "!candidate-7.0.*"
      - "!candidate-6.*"
env:
  ML_SUPPRESS_WARNING_FILES: "RegressionTestModified.ecl ClassificationTestModified.ecl"
  ML_EXCLUDE_FILES: "--ef ClassicTestModified.ecl,SVCTest.ecl,ClassificationTestModified.ecl"
  BUNDLES_TO_TEST: "ML_Core PBblas GLM GNN DBSCAN LearningTrees TextVectors KMeans SupportVectorMachines LinearRegression LogisticRegression"
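  # NOTE: the test steps below currently define their own bundle and exclude lists inline;
  # these values appear to be kept for reuse (e.g. via the commented-out workflow_call trigger).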
jobs:
  main:
    runs-on: ubuntu-22.04
    steps:
      - name: Free additional disk space (remove Android SDK + Tools)
        run: |
          sudo rm -rf /usr/local/lib/android
      # - name: Download Package
      #   uses: actions/download-artifact@v3
      #   with:
      #     name: ${{ inputs.asset-name }}
      #     path: ${{ inputs.asset-name }}
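      # Download the platform packages to install: the artifact produced by the latest
      # successful build-vcpkg.yml run in hpcc-systems/HPCC-Platform (expected to contain
      # the Ubuntu 22.04 .deb packages consumed by the "Install Package" step below).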
      - name: Download artifact
        id: download-artifact
        uses: dawidd6/action-download-artifact@v3
        with:
          repo: hpcc-systems/HPCC-Platform
          name: docker-ubuntu-22_04
          workflow: build-vcpkg.yml
          workflow_conclusion: success
          check_artifacts: true
      - name: Install Dependencies
        shell: "bash"
        run: |
          sudo apt-get update
          sudo apt-get install -y \
            git \
            wget \
            net-tools \
            tzdata \
            unzip \
            xvfb \
            libxi6 \
            default-jdk \
            gdb \
            bison flex build-essential binutils-dev curl lsb-release libcppunit-dev python3-dev default-jdk r-base-dev r-cran-rcpp r-cran-rinside r-cran-inline pkg-config libtool autotools-dev automake git cmake xmlstarlet
      - name: Install Package
        shell: "bash"
        run: |
          sudo apt-get install -y -f ./*.deb
          sudo chown -R $USER:$USER /opt/HPCCSystems
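          # Reconfigure the local Thor cluster to run two slave processes per node with one channel each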
          sudo xmlstarlet ed -L -u 'Environment/Software/ThorCluster/@slavesPerNode' -v 2 -u 'Environment/Software/ThorCluster/@channelsPerSlave' -v 1 /etc/HPCCSystems/environment.xml
      - name: Install ML Dependencies
        shell: "bash"
        run: |
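          # libsvm is installed for the SupportVectorMachines bundle tests; the Python/TensorFlow
          # dependencies (used by GNN) are currently left commented out below.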
          sudo apt install -y libsvm-dev libsvm-tools
          #sudo pip install GLM GNN Kmeans tensorflow numpy keras
          #wget https://cdn.hpccsystems.com/releases/CE-Candidate-9.6.12/bin/clienttools/hpccsystems-clienttools-community_9.6.12-1jammy_amd64_withsymbols.deb
          #wget https://cdn.hpccsystems.com/releases/CE-Candidate-9.6.10/bin/plugins/hpccsystems-plugin-eclblas_9.6.10-1jammy_amd64_withsymbols.deb
          #sudo dpkg -i --force-all hpccsystems-plugin-eclblas_9.6.10-1jammy_amd64_withsymbols.deb
        continue-on-error: true
      - name: Start HPCC-Platform
        shell: "bash"
        run: |
          export LANG="en_US.UTF-8"
          sudo update-locale
          sudo /etc/init.d/hpcc-init start
      - name: Install ML Bundles from GitHub
        shell: "bash"
        run: |
          set -x
          BUNDLES_TO_TEST=( "ML_Core" "PBblas" "DBSCAN" "TextVectors" "GLM" "GNN" "LearningTrees" "KMeans" "SupportVectorMachines" "LinearRegression" "LogisticRegression" )
          BUNDLES_COUNT=${#BUNDLES_TO_TEST[@]}
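          # Install each bundle from its hpcc-systems GitHub repository, retrying a few
          # times with a delay to ride out transient network or GitHub failures.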
          for ((i=0; i<$BUNDLES_COUNT; i++))
          do
            BUNDLE_NAME=${BUNDLES_TO_TEST[i]}
            BUNDLE_REPO="https://github.com/hpcc-systems/${BUNDLES_TO_TEST[i]}.git"
            INSTALL_CMD="ecl bundle install -v --update --force $ML_INSTALL_EXTRA ${BUNDLE_REPO}"
            tryCountMax=5
            tryCount=$tryCountMax
            tryDelay=1m
            while true
            do
              echo "Installing ${BUNDLE_NAME} (attempts left: ${tryCount})"
              cRes=$( ${INSTALL_CMD} 2>&1 )
              retCode=$?
              if [[ $retCode -ne 0 ]]
              then
                tryCount=$(( $tryCount-1 ))
                echo "Install of ${BUNDLE_NAME} failed with exit code ${retCode}."
                if [[ $tryCount -ne 0 ]]
                then
                  echo "Retrying in ${tryDelay}."
                  sleep ${tryDelay}
                  continue
                else
                  break
                fi
              else
                BUNDLE_VERSION=$( echo "${cRes}" | egrep "^$BUNDLE_NAME" | awk '{ print $2 }' )
                break
              fi
            done
          done
      - name: Run Tests
        id: run
        shell: "bash"
        working-directory: /home/runner/.HPCCSystems/bundles/_versions/
        run: |
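          # ProcessLog <bundle> <target>: inspect the most recent Thor regression log,
          # record any failures in Failed_test.summary (flagging the artifact upload),
          # then rename the log after the bundle so results from different bundles don't collide.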
          ProcessLog()
          {
            BUNDLE=$1
            TARGET=$2
            logfilename=$( ls -clr /home/runner/HPCCSystems-regression/log/thor.*.log | head -1 | awk '{ print $9 }' )
            failed=$(cat ${logfilename} | sed -n "s/^[[:space:]]*Failure:[[:space:]]*\([0-9]*\)[[:space:]]*$/\1/p")
            if [[ "$failed" -ne 0 ]]
            then
              echo "Bundle : ${BUNDLE}" >> /home/runner/HPCCSystems-regression/log/Failed_test.summary
              cat ${logfilename} >> /home/runner/HPCCSystems-regression/log/Failed_test.summary
              echo "uploadArtifact=true" >> $GITHUB_OUTPUT
            fi
            # Rename result log file to name of the bundle
            logname=$(basename $logfilename)
            bundlelogfilename=${logname//thor/$BUNDLE}
            printf "%s, %s\n" "$logname" "$bundlelogfilename"
            mv -v $logfilename /home/runner/HPCCSystems-regression/ml-$bundlelogfilename
          }
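          # Walk every installed bundle's test suite (directories named 'ecl' under the
          # bundle install root), run it on Thor, and post-process its log with ProcessLog.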
          BUNDLES_TO_TEST=( "ML_Core" "PBblas" "DBSCAN" "TextVectors" "GLM" "GNN" "LearningTrees" "KMeans" "SupportVectorMachines" "LinearRegression" "LogisticRegression" )
          while read bundle
          do
            bundleRunPath=${bundle%/ecl}            # remove '/ecl' from the end of the $bundle
            bundlePath=${bundleRunPath%/OBTTests}   # remove '/OBTTests' from the end of the $bundleRunPath if exists
            bundleName=${bundlePath%/test}          # remove '/test' from the end of the $bundlePath if exists
            bundleName=$(basename $bundleName)      # remove path from $bundleName
if [[ "$bundle" =~ "LearningTrees" ]] | ||
then | ||
# add a warning supression parameter in the file | ||
for file in "RegressionTestModified.ecl" "ClassificationTestModified.ecl" | ||
do | ||
if [[ $( egrep -c '#ONWARNING\(30004' $bundle/$file ) -eq 0 ]] | ||
then | ||
pushd $bundle | ||
cp -fv $file $file-back | ||
# Insert a comment and the "#ONWARNING" after the Copyright header | ||
sed -i '/## \*\//a \\n// Patched by the bundleTest on '"$( date '+%Y.%m.%d %H:%M:%S')"' \n#ONWARNING(30004, ignore); // Do not report execute time skew warning' $file | ||
popd | ||
fi | ||
done | ||
fi | ||
if [[ ! "${BUNDLES_TO_TEST[*]}" =~ "$bundleName" ]] | ||
then | ||
continue | ||
fi | ||
pushd $bundleRunPath | ||
/opt/HPCCSystems/testing/regress/ecl-test run -t thor --config /opt/HPCCSystems/testing/regress/ecl-test.json --timeout 3600 -fthorConnectTimeout=3600 --pq 1 --ef ClassicTestModified.ecl,SVCTest.ecl,ClassificationTestModified.ecl | ||
retCode=$( echo $? ) | ||
if [ ${retCode} -eq 0 ] | ||
then | ||
ProcessLog "$bundleName" "thor" | ||
fi | ||
popd | ||
done< <(find . -iname 'ecl' -type d | sort ) | ||
      # - name: Generate ZAP files
      #   if: ${{ ! inputs.Generate-Zap == '' }}
      #   run: |
      #     IFS=' ' read -a ML_GENERATE_ZAP_FOR <<< ${{ inputs.Generate-Zap }}
      #     if [ ${#ML_GENERATE_ZAP_FOR[@]} -ne 0 ]
      #     then
      #       for test in ${ML_GENERATE_ZAP_FOR[*]}
      #       do
      #         wuid=$(ecl getwuid -n $test --limit 1)
      #         if [ -n $wuid ]
      #         then
      #           ecl zapgen $wuid --path /home/runner/HPCCSystems-regression/zap --inc-thor-slave-logs
      #         fi
      #       done
      #     fi
      - name: Check for Core files
        run: |
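          # Look for core files dumped by any HPCC component during the run and capture
          # gdb backtraces for them so the traces can be uploaded with the test logs.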
          NUM_OF_ML_CORES=( $(sudo find /var/lib/HPCCSystems/ -iname 'core*' -mtime -1 -type f -exec printf "%s\n" '{}' \; ) )
          GDB_CMD=( gdb --batch --quiet -ex "set interactive-mode off" -ex "echo \n Backtrace for all threads\n==========================" -ex "thread apply all bt" -ex "echo \n Registers:\n==========================\n" -ex "info reg" -ex "echo \n Disas:\n==========================\n" -ex "disas" -ex "quit" )
          if [ ${#NUM_OF_ML_CORES[@]} -ne 0 ]
          then
            for core in ${NUM_OF_ML_CORES[@]}
            do
              base=$( dirname $core )
              lastSubdir=${base##*/}
              comp=${lastSubdir##my}
              #sudo gdb --batch --quiet -ex "set interactive-mode off" -ex "thread apply all bt" -ex "quit" "/opt/HPCCSystems/bin/${comp}" $core | sudo tee "$core.trace" 2>&1
              sudo "${GDB_CMD[@]}" "/opt/HPCCSystems/bin/${comp}" $core | sudo tee "$core.trace" 2>&1
              cp "$core.trace" /home/runner/HPCCSystems-regression/log/
            done
          fi
      # - name: Get test stat
      #   if: ${{ inputs.Get-Stat }}
      #   run: |
      #     ./QueryStat2.py -p /home/runner/HPCCSystems-regression/log/ -d '' -a --timestamp --compileTimeDetails 1 --graphTimings --allGraphItems --addHeader
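      # Upload the regression logs (and any ZAP reports) when the run fails, is cancelled,
      # or a bundle reported test failures via the uploadArtifact output set above.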
      - name: ml-thor-test-logs-artifact
        if: ${{ failure() || cancelled() || steps.run.outputs.uploadArtifact }}
        uses: actions/upload-artifact@v3
        with:
          name: ${{ inputs.asset-name }}-thor-test-logs
          path: |
            /home/runner/HPCCSystems-regression/log/*
            /home/runner/HPCCSystems-regression/zap/*
          if-no-files-found: ignore