From cdcb1085a5a7695f1f116c2f632f131174441b5d Mon Sep 17 00:00:00 2001 From: Ambrus Simon Date: Mon, 30 Oct 2017 16:44:20 -0500 Subject: [PATCH 1/5] Enable build/lint/unit/integ skip and py.test args for individual test running --- .pylintrc | 2 +- TESTING.md | 18 ++- test/bin/lint.sh | 21 --- test/bin/run-integration-tests.sh | 78 ----------- test/bin/run-tests-docker.sh | 152 ++++++++++----------- test/bin/run-tests-osx.sh | 65 --------- test/bin/run-tests-ubuntu.sh | 217 +++++++++++++++++++++--------- test/bin/run-unit-tests.sh | 13 -- 8 files changed, 242 insertions(+), 324 deletions(-) delete mode 100755 test/bin/lint.sh delete mode 100755 test/bin/run-integration-tests.sh delete mode 100755 test/bin/run-tests-osx.sh delete mode 100755 test/bin/run-unit-tests.sh diff --git a/.pylintrc b/.pylintrc index bc52bc40b..851947877 100644 --- a/.pylintrc +++ b/.pylintrc @@ -212,7 +212,7 @@ indent-string=' ' indent-after-paren=4 # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= +expected-line-ending-format=LF [LOGGING] diff --git a/TESTING.md b/TESTING.md index 249f0a2ba..0e8eedc9a 100644 --- a/TESTING.md +++ b/TESTING.md @@ -1,21 +1,31 @@ ## Run the tests -### OSX -``` -./test/bin/run-tests-osx.sh -``` ### Ubuntu +Run automated tests: ``` # Follow installation instructions in README first . /runtime/bin/activate # Or wherever your scitran virtualenv is ./test/bin/setup-integration-tests-ubuntu.sh ./test/bin/run-tests-ubuntu.sh ``` +* To skip linting, use `--no-lint` (`-L`) +* To skip unit tests, use `--no-unit` (`-U`) +* To skip integration tests, use `--no-integ` (`-I`) +* To pass any arguments to `py.test`, use `-- PYTEST_ARGS` ### Docker +Build scitran-core image and run automated tests in a docker container: ``` ./test/bin/run-tests-docker.sh ``` +* To skip building the image, use `--no-build` (`-B`) +* To pass any arguments to `run-tests-ubuntu.sh`, use `-- TEST_ARGS` + +#### Example +Without rebuilding the image, run only unit tests matching `foo`, use the highest verbosity level for test output and jump into a python debugger session in case an assertion fails: +``` +./test/bin/run-tests-docker.sh -B -- -L -I -- -k foo -vvv --pdb +``` ### Tools - [abao](https://github.com/cybertk/abao/) diff --git a/test/bin/lint.sh b/test/bin/lint.sh deleted file mode 100755 index f5b843671..000000000 --- a/test/bin/lint.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env bash - -set -eu - -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/../.." - -echo "Checking for files with DOS encoding:" -(! git ls-files | xargs file | grep -I "with CRLF line terminators") - -echo "Checking for files with windows-style newlines:" -(! git ls-files | xargs grep -I $'\r') - -echo "Running pylint ..." -# TODO: Enable Refactor and Convention reports -pylint --reports=no --disable=C,R,W0312,W0141,W0110 api - -#echo -# -#echo "Running pep8 ..." -#pep8 --max-line-length=150 --ignore=E402 api diff --git a/test/bin/run-integration-tests.sh b/test/bin/run-integration-tests.sh deleted file mode 100755 index 5b9ee55af..000000000 --- a/test/bin/run-integration-tests.sh +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env bash -set -eu - -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/../.." 
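The scripts touched in this series, both the deleted and the rewritten ones, share the same preamble so they can be invoked from any directory; a commented sketch of that idiom (comments are mine):
```
#!/usr/bin/env bash
set -eu     # abort on the first failing command or reference to an unset variable

unset CDPATH                                 # keep a user CDPATH setting from redirecting cd
cd "$( dirname "${BASH_SOURCE[0]}" )/../.."  # from test/bin/ up to the repository root
```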
- -rm -f .coverage.integration-tests - -USAGE=" - Usage:\n - $0 \n - \n -" - -if [ "$#" -eq 4 ]; then - SCITRAN_SITE_API_URL=$1 - SCITRAN_PERSISTENT_DB_URI=$2 - SCITRAN_PERSISTENT_DB_LOG_URI=$3 - SCITRAN_CORE_DRONE_SECRET=$4 -else - echo "Wrong number of positional arguments" - echo $USAGE >&2 - exit 1 -fi - -echo "Connecting to API" -until $(curl --output /dev/null --silent --head --fail "$SCITRAN_SITE_API_URL"); do - printf '.' - sleep 1 -done - -# Remove __pycache__ directory for issue with __file__ attribute -# Due to running the tests on the host creating bytecode files -# Which have a mismatched __file__ attribute when loaded in docker container -rm -rf test/integration_tests/python/__pycache__ - -PYTHONPATH="$( pwd )" \ -SCITRAN_SITE_API_URL="$SCITRAN_SITE_API_URL" \ -SCITRAN_PERSISTENT_DB_URI="$SCITRAN_PERSISTENT_DB_URI" \ -SCITRAN_PERSISTENT_DB_LOG_URI="$SCITRAN_PERSISTENT_DB_LOG_URI" \ -SCITRAN_CORE_DRONE_SECRET="$SCITRAN_CORE_DRONE_SECRET" \ - py.test test/integration_tests/python - -# Create resources that Abao relies on: -# - user w/ api key -# - scitran group -# - test-group -# - test-project-1 (+analysis upload) -# - test-session-1 (+analysis upload) -# - test-acquisition-1 (+analysis upload) -# - test-case-gear -# - test-collection-1 (+analysis upload) -SCITRAN_SITE_API_URL="$SCITRAN_SITE_API_URL" \ -SCITRAN_CORE_DRONE_SECRET="$SCITRAN_CORE_DRONE_SECRET" \ -SCITRAN_PERSISTENT_DB_URI="$SCITRAN_PERSISTENT_DB_URI" \ - python test/integration_tests/abao/load_fixture.py - -set +u -# If no VIRTUAL_ENV, make sure /usr/local/bin is in the path -if [ -z "$VIRTUAL_ENV" ]; then - PATH="/usr/local/bin:$PATH" - npm install test/integration_tests -else - npm install --global test/integration_tests -fi -set -u - -PATH="$(npm bin):$PATH" - -# Allow us to require modules from package.json, -# since abao_test_hooks.js is not being called from the package directory -integration_test_node_modules="$( pwd )/node_modules/scitran-core-integration-tests/node_modules" - -# Have to change into definitions directory to resolve -# relative $ref's in the jsonschema's -pushd raml/schemas/definitions -NODE_PATH="$integration_test_node_modules" abao ../../api.raml "--server=$SCITRAN_SITE_API_URL" "--hookfiles=../../../test/integration_tests/abao/abao_test_hooks.js" -popd diff --git a/test/bin/run-tests-docker.sh b/test/bin/run-tests-docker.sh index f60ce816e..dd7349be2 100755 --- a/test/bin/run-tests-docker.sh +++ b/test/bin/run-tests-docker.sh @@ -1,85 +1,81 @@ #!/usr/bin/env bash -set -e - +set -eu unset CDPATH cd "$( dirname "${BASH_SOURCE[0]}" )/../.." 
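The rewritten scripts tighten `set -e` to `set -eu`, which is why optional variables are later expanded with an explicit default (e.g. `${VIRTUAL_ENV:-}`) instead of being read directly; a minimal illustration:
```
#!/usr/bin/env bash
set -eu

# Under `set -u`, expanding a variable that was never set aborts the script,
# so optional settings get an empty default instead:
if [[ -z "${VIRTUAL_ENV:-}" ]]; then
    echo "no virtualenv active"
fi
```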
-IMAGE_NAME_SCITRAN_CORE="scitran-core:run-tests" -IMAGE_NAME_MONGO=mongo -CONTAINER_NAME_MONGO=scitran-core-test-mongo -CONTAINER_NAME_SCITRAN_CORE=scitran-core-test-uwsgi - - -USAGE=" - Run scitran-core tests using docker -\n - Usage:\n - \n - --help: print help and exit\n - -b, --build-image: Rebuild scitran-core base image\n - -L, --no-lint: Skip linter\n - -" - -SCITRAN_RUN_LINT="true" -BUILD_IMAGE="false" - -while [ "$#" -gt 0 ]; do - key="$1" - case $key in - --help) - echo -e $USAGE >&2 - exit 1 - ;; - -b|--build-image) - BUILD_IMAGE="true" - ;; - -L|--no-lint) - SCITRAN_RUN_LINT="false" - ;; - *) - echo "Invalid option: $key" >&2 - echo -e $USAGE >&2 - exit 1 - ;; - esac - shift -done - - -clean_up () { - # Copy coverage file to host for possible further reporting - docker cp "$CONTAINER_NAME_SCITRAN_CORE":/var/scitran/code/api/.coverage .coverage || true - # Stop and remove containers - docker rm -v -f "$CONTAINER_NAME_MONGO" - docker rm -v -f "$CONTAINER_NAME_SCITRAN_CORE" + +function usage() { +cat >&2 < 0 ]]; do + case "$1" in + -B|--no-build) DOCKER_BUILD=false; ;; + -h|--help) usage; exit 0 ;; + --) TEST_ARGS="${@:2}"; break ;; + *) echo "Invalid argument: $1" >&2; usage; exit 1 ;; + esac + shift + done + + if ${DOCKER_BUILD}; then + echo "Building scitran-core:run-tests ..." + docker build -t scitran-core:run-tests . + fi + + docker network create scitran-core-test-network + + # Launch Mongo instance + docker run -d \ + --name scitran-core-test-mongo \ + --network scitran-core-test-network \ + mongo + + # Execute tests + docker run -it \ + --name scitran-core-test-uwsgi \ + --network scitran-core-test-network \ + -e SCITRAN_PERSISTENT_DB_URI=mongodb://scitran-core-test-mongo:27017/scitran \ + -e SCITRAN_PERSISTENT_DB_LOG_URI=mongodb://scitran-core-test-mongo:27017/logs \ + -v $(pwd):/var/scitran/code/api \ + --entrypoint bash \ + scitran-core:run-tests \ + /var/scitran/code/api/test/bin/run-tests-ubuntu.sh \ + $TEST_ARGS +} + + +function clean_up() { + export TEST_RESULT_CODE=$? + set +e + + # Copy coverage file to host for possible further reporting + docker cp scitran-core-test-uwsgi:/var/scitran/code/api/.coverage .coverage + + # Spin down dependencies + docker rm -f -v scitran-core-test-uwsgi + docker rm -f -v scitran-core-test-mongo + docker network rm scitran-core-test-network + exit $TEST_RESULT_CODE } + trap clean_up EXIT -if [[ $( docker images "$IMAGE_NAME_SCITRAN_CORE" | tail -n +2 ) == "" ]]; then - echo "$IMAGE_NAME_SCITRAN_CORE image not found. Building" - BUILD_IMAGE="true" -fi - -if [ "$BUILD_IMAGE" == "true" ]; then - docker build -t "$IMAGE_NAME_SCITRAN_CORE" . 
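The option loop in the new `run-tests-docker.sh` above captures everything after a literal `--` with `"${@:2}"` and hands it to the wrapped command; a standalone sketch of that pass-through pattern (the option names here are placeholders):
```
#!/usr/bin/env bash
# Options before `--` are handled locally; everything after it is forwarded.
FORWARD_ARGS=
while [[ $# -gt 0 ]]; do
    case "$1" in
        --) FORWARD_ARGS="${@:2}"; break ;;   # note: joins the remaining args into one string
        -x|--example) echo "local option: $1" ;;
        *) echo "unknown option: $1" >&2; exit 1 ;;
    esac
    shift
done
echo "forwarding: $FORWARD_ARGS"
```
Invoked as `./sketch.sh -x -- -k foo -vvv`, it handles `-x` locally and forwards `-k foo -vvv` unchanged, which is exactly how the nested `--` example in TESTING.md works.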
-fi - -# Sub-shell the test steps to make the functionality of the trap execution explicit -( -# Launch Mongo isinstance -docker run --name "$CONTAINER_NAME_MONGO" -d "$IMAGE_NAME_MONGO" - -# Execute tests -docker run \ - -it \ - --name "$CONTAINER_NAME_SCITRAN_CORE"\ - -e "SCITRAN_PERSISTENT_DB_URI=mongodb://$CONTAINER_NAME_MONGO:27017/scitran" \ - -e "SCITRAN_PERSISTENT_DB_LOG_URI=mongodb://$CONTAINER_NAME_MONGO:27017/logs" \ - -e "SCITRAN_RUN_LINT=$SCITRAN_RUN_LINT" \ - --link "$CONTAINER_NAME_MONGO" \ - -v $(pwd):/var/scitran/code/api \ - --entrypoint bash \ - "$IMAGE_NAME_SCITRAN_CORE" \ - /var/scitran/code/api/test/bin/run-tests-ubuntu.sh -) + +main "$@" diff --git a/test/bin/run-tests-osx.sh b/test/bin/run-tests-osx.sh deleted file mode 100755 index aff12d968..000000000 --- a/test/bin/run-tests-osx.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env bash - -set -e - -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/../.." - -set -a -# Use port 9003 to hopefully avoid conflicts -SCITRAN_RUNTIME_PATH=${SCITRAN_RUNTIME_PATH:-"$( pwd )/runtime"} -SCITRAN_PERSISTENT_DB_PORT=9003 -SCITRAN_PERSISTENT_DB_URI="mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/integration-tests" -SCITRAN_PERSISTENT_DB_LOG_URI=${SCITRAN_PERSISTENT_DB_LOG_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/logs"} -SCITRAN_PERSISTENT_PATH="$( mktemp -d )" -SCITRAN_CORE_DRONE_SECRET=${SCITRAN_CORE_DRONE_SECRET:-$( openssl rand -base64 32 )} - -clean_up () { - kill $API_PID || true - wait 2> /dev/null - rm -rf "$SCITRAN_PERSISTENT_PATH" - - # NOTE on omit: cross-site feature unused and planned for removal - local OMIT="--omit api/centralclient.py" - echo -e "\nUNIT TEST COVERAGE:" - coverage report $OMIT --skip-covered - - coverage combine - echo -e "\nOVERALL COVERAGE:" - coverage report $OMIT --show-missing - coverage html $OMIT -} - -trap clean_up EXIT - -./bin/install-dev-osx.sh - -source $SCITRAN_RUNTIME_PATH/bin/activate # will fail with `set -u` - -# Install Node.js -if [ ! -f "$SCITRAN_RUNTIME_PATH/bin/node" ]; then - echo "Installing Node.js" - NODE_URL="https://nodejs.org/dist/v6.10.2/node-v6.10.2-darwin-x64.tar.gz" - curl $NODE_URL | tar xz -C $VIRTUAL_ENV --strip-components 1 -fi - -# Install testing dependencies -echo "Installing testing dependencies" -pip install --no-cache-dir -r "test/integration_tests/requirements-integration-test.txt" - -./test/bin/lint.sh api - -SCITRAN_CORE_DRONE_SECRET=$SCITRAN_CORE_DRONE_SECRET \ - ./test/bin/run-unit-tests.sh - -SCITRAN_RUNTIME_PORT=8081 \ - SCITRAN_CORE_DRONE_SECRET="$SCITRAN_CORE_DRONE_SECRET" \ - SCITRAN_RUNTIME_COVERAGE="true" \ - ./bin/run-dev-osx.sh -T -U -I & -API_PID=$! - -./test/bin/run-integration-tests.sh \ - "http://localhost:8081/api" \ - "$SCITRAN_PERSISTENT_DB_URI" \ - "$SCITRAN_PERSISTENT_DB_LOG_URI" \ - "$SCITRAN_CORE_DRONE_SECRET" diff --git a/test/bin/run-tests-ubuntu.sh b/test/bin/run-tests-ubuntu.sh index 372bb231e..f0e7b7989 100755 --- a/test/bin/run-tests-ubuntu.sh +++ b/test/bin/run-tests-ubuntu.sh @@ -1,76 +1,165 @@ #!/usr/bin/env bash -# -# Run all scripted tests from ubuntu 14.04 or later -# -# Assumes mongo db instance is accessible at localhost, unless -# SCITRAN_PERSISTENT_DB_URI or SCITRAN_PERSISTENT_DB_LOG_URI specify otherwise. -# -# Forces SCITRAN_CORE_ACCESS_LOG_ENABLED=true -# -# Variables: -# - See sample.config for options. -# - SCITRAN_PERSISTENT_DB_URI will cause SCITRAN_PERSISTENT_DB_PORT to be -# ignored. -# - SCITRAN_PERSISTENT_DB_LOG_URI will cause SCITRAN_PERSISTENT_DB_PORT to be -# ignored. 
- -set -e - +set -eu unset CDPATH cd "$( dirname "${BASH_SOURCE[0]}" )/../.." -SCITRAN_RUN_LINT=${SCITRAN_RUN_LINT:-"true"} -if [ "$SCITRAN_RUN_LINT" == "true" ]; then - ./test/bin/lint.sh api -fi +function usage() { +cat >&2 < /dev/null +Options: + -L, --no-lint Skip linting + -U, --no-unit Skip unit tests + -I, --no-integ Skip integration tests + -h, --help Print this help and exit + -- PYTEST_ARGS Arguments passed to py.test - # NOTE on omit: cross-site feature unused and planned for removal - local OMIT="--omit api/centralclient.py" - echo -e "\nUNIT TEST COVERAGE:" - coverage report $OMIT --skip-covered +Envvars: + SCITRAN_PERSISTENT_DB_PORT (9001) + SCITRAN_PERSISTENT_DB_URI (mongodb://localhost:9001/scitran) + SCITRAN_PERSISTENT_DB_LOG_URI (mongodb://localhost:9001/logs) - coverage combine - echo -e "\nOVERALL COVERAGE:" - coverage report $OMIT --show-missing - coverage html $OMIT +Assumes mongo db instance is accessible at localhost, unless +SCITRAN_PERSISTENT_DB_URI or SCITRAN_PERSISTENT_DB_LOG_URI specify otherwise. + +EOF +} + + +function main() { + local RUN_LINT=true + local RUN_UNIT=true + local RUN_INTEG=true + local PYTEST_ARGS= + + while [[ "$#" > 0 ]]; do + case "$1" in + -L|--no-lint) RUN_LINT=false ;; + -U|--no-unit) RUN_UNIT=false ;; + -I|--no-integ) RUN_INTEG=false ;; + -h|--help) usage; exit 0 ;; + --) PYTEST_ARGS="${@:2}"; break ;; + *) echo "Invalid argument: $1" >&2; usage; exit 1 ;; + esac + shift + done + + # Remove __pycache__ directories for issue with __file__ attribute due to + # running the tests on the host creating bytecode files hich have a + # mismatched __file__ attribute when loaded in docker container + rm -rf test/unit_tests/python/__pycache__ + rm -rf test/integration_tests/python/__pycache__ + + export PYTHONPATH="$(pwd)" + export SCITRAN_SITE_API_URL="http://localhost:8081/api" + export SCITRAN_PERSISTENT_DB_PORT=${SCITRAN_PERSISTENT_DB_PORT:-"9001"} + export SCITRAN_PERSISTENT_DB_URI=${SCITRAN_PERSISTENT_DB_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/scitran"} + export SCITRAN_PERSISTENT_DB_LOG_URI=${SCITRAN_PERSISTENT_DB_LOG_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/logs"} + export SCITRAN_PERSISTENT_PATH=`mktemp -d` + export SCITRAN_PERSISTENT_DATA_PATH="$SCITRAN_PERSISTENT_PATH/data" + export SCITRAN_CORE_DRONE_SECRET=${SCITRAN_CORE_DRONE_SECRET:-$( openssl rand -base64 32 )} + + if ${RUN_LINT}; then + echo "Running pylint ..." + # TODO Enable Refactor and Convention reports + # TODO Move --disable into rc + pylint --reports=no --disable=C,R,W0312,W0141,W0110 api + + # echo "Running pep8 ..." + # pep8 --max-line-length=150 --ignore=E402 api + fi + + if ${RUN_UNIT}; then + echo "Running unit tests ..." + rm -f .coverage + py.test --cov=api --cov-report= test/unit_tests/python $PYTEST_ARGS + fi + + if ${RUN_INTEG}; then + echo "Running integration tests ..." 
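The exports earlier in `main()` use the `${VAR:-default}` form, so every value can still be overridden from the caller's environment (Travis, for instance, passes `SCITRAN_PERSISTENT_DB_PORT=27017`); a compact sketch:
```
# Defaults apply only when the variable is unset or empty, so callers can override them:
#   SCITRAN_PERSISTENT_DB_PORT=27017 ./test/bin/run-tests-ubuntu.sh
export SCITRAN_PERSISTENT_DB_PORT=${SCITRAN_PERSISTENT_DB_PORT:-"9001"}
export SCITRAN_PERSISTENT_DB_URI=${SCITRAN_PERSISTENT_DB_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/scitran"}
```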
+ uwsgi --http "localhost:8081" --master --http-keepalive \ + --so-keepalive --add-header "Connection: Keep-Alive" \ + --processes 1 --threads 1 \ + --enable-threads \ + --wsgi-file bin/api.wsgi \ + --die-on-term \ + --logformat '%(addr) - %(user) [%(ltime)] "%(method) %(uri) %(proto)" %(status) %(size) "%(referer)" "%(uagent)" request_id=%(request_id)' \ + --env "SCITRAN_PERSISTENT_DB_URI=$SCITRAN_PERSISTENT_DB_URI" \ + --env "SCITRAN_PERSISTENT_DB_LOG_URI=$SCITRAN_PERSISTENT_DB_LOG_URI" \ + --env "SCITRAN_PERSISTENT_PATH=$SCITRAN_PERSISTENT_PATH" \ + --env "SCITRAN_PERSISTENT_DATA_PATH=$SCITRAN_PERSISTENT_DATA_PATH" \ + --env "SCITRAN_CORE_DRONE_SECRET=$SCITRAN_CORE_DRONE_SECRET" \ + --env "SCITRAN_RUNTIME_COVERAGE=true" \ + --env "SCITRAN_CORE_ACCESS_LOG_ENABLED=true" & + export API_PID=$! + + echo "Connecting to API" + until $(curl --output /dev/null --silent --head --fail "$SCITRAN_SITE_API_URL"); do + printf '.' + sleep 1 + done + + py.test test/integration_tests/python $PYTEST_ARGS + + + # Create resources that Abao relies on + python test/integration_tests/abao/load_fixture.py + + # If no VIRTUAL_ENV, make sure /usr/local/bin is in the path + if [[ -z "${VIRTUAL_ENV:-}" ]]; then + PATH="/usr/local/bin:$PATH" + npm install test/integration_tests + else + npm install --global test/integration_tests + fi + + PATH="$(npm bin):$PATH" + + # Allow us to require modules from package.json, + # since abao_test_hooks.js is not being called from the package directory + integration_test_node_modules="$(pwd)/node_modules/scitran-core-integration-tests/node_modules" + + # Have to change into definitions directory to resolve + # relative $ref's in the jsonschema's + pushd raml/schemas/definitions + NODE_PATH="$integration_test_node_modules" abao ../../api.raml "--server=$SCITRAN_SITE_API_URL" "--hookfiles=../../../test/integration_tests/abao/abao_test_hooks.js" + popd + fi +} + + +function clean_up () { + export TEST_RESULT_CODE=$? 
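The `clean_up` handler that begins just above captures `$?` before doing anything else; sketched on its own, the pattern used throughout these scripts is:
```
function clean_up() {
    local RESULT=$?              # status of the command that triggered the EXIT trap
    set +e                       # don't let cleanup failures mask that status
    # ... stop background processes / containers, print reports ...
    exit $RESULT                 # re-exit so callers and CI still see the real result
}
trap clean_up EXIT
```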
+ set +e + + echo + echo "Test return code = $TEST_RESULT_CODE" + + if [[ -n "${API_PID:-}" ]]; then + # Killing uwsgi + kill $API_PID || true + wait 2> /dev/null + fi + + if [[ "${TEST_RESULT_CODE}" == "0" ]]; then + echo + echo "UNIT TEST COVERAGE:" + coverage report --skip-covered + echo + echo "OVERALL COVERAGE:" + coverage combine + coverage report --show-missing + coverage html + fi + + exit $TEST_RESULT_CODE } trap clean_up EXIT -API_BASE_URL="http://localhost:8081/api" -SCITRAN_PERSISTENT_DB_PORT=${SCITRAN_PERSISTENT_DB_PORT:-"9001"} -SCITRAN_PERSISTENT_DB_URI=${SCITRAN_PERSISTENT_DB_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/scitran"} -SCITRAN_PERSISTENT_DB_LOG_URI=${SCITRAN_PERSISTENT_DB_LOG_URI:-"mongodb://localhost:$SCITRAN_PERSISTENT_DB_PORT/logs"} -SCITRAN_PERSISTENT_PATH=`mktemp -d` -SCITRAN_PERSISTENT_DATA_PATH="$SCITRAN_PERSISTENT_PATH/data" -SCITRAN_CORE_DRONE_SECRET=${SCITRAN_CORE_DRONE_SECRET:-$( openssl rand -base64 32 )} - -SCITRAN_CORE_DRONE_SECRET=$SCITRAN_CORE_DRONE_SECRET \ - ./test/bin/run-unit-tests.sh - -uwsgi --http "localhost:8081" --master --http-keepalive \ - --so-keepalive --add-header "Connection: Keep-Alive" \ - --processes 1 --threads 1 \ - --enable-threads \ - --wsgi-file bin/api.wsgi \ - --die-on-term \ - --logformat '%(addr) - %(user) [%(ltime)] "%(method) %(uri) %(proto)" %(status) %(size) "%(referer)" "%(uagent)" request_id=%(request_id)' \ - --env "SCITRAN_PERSISTENT_DB_URI=$SCITRAN_PERSISTENT_DB_URI" \ - --env "SCITRAN_PERSISTENT_DB_LOG_URI=$SCITRAN_PERSISTENT_DB_LOG_URI" \ - --env "SCITRAN_PERSISTENT_PATH=$SCITRAN_PERSISTENT_PATH" \ - --env "SCITRAN_PERSISTENT_DATA_PATH=$SCITRAN_PERSISTENT_DATA_PATH" \ - --env "SCITRAN_CORE_DRONE_SECRET=$SCITRAN_CORE_DRONE_SECRET" \ - --env 'SCITRAN_RUNTIME_COVERAGE=true' \ - --env 'SCITRAN_CORE_ACCESS_LOG_ENABLED=true' & -API_PID=$! - -./test/bin/run-integration-tests.sh \ - "$API_BASE_URL" \ - "$SCITRAN_PERSISTENT_DB_URI" \ - "$SCITRAN_PERSISTENT_DB_LOG_URI" \ - "$SCITRAN_CORE_DRONE_SECRET" + +main "$@" diff --git a/test/bin/run-unit-tests.sh b/test/bin/run-unit-tests.sh deleted file mode 100755 index dd544a64a..000000000 --- a/test/bin/run-unit-tests.sh +++ /dev/null @@ -1,13 +0,0 @@ -set -e - -unset CDPATH -cd "$( dirname "${BASH_SOURCE[0]}" )/../.." - -# Remove __pycache__ directory for issue with __file__ attribute -# Due to running the tests on the host creating bytecode files -# Which have a mismatched __file__ attribute when loaded in docker container -rm -rf test/unit_tests/python/__pycache__ - -rm -f .coverage - -PYTHONPATH="$( pwd )" py.test --cov=api --cov-report= test/unit_tests/python From 9d2834f078961b2c0da7908bcf9b7db0e9540b84 Mon Sep 17 00:00:00 2001 From: Ambrus Simon Date: Wed, 8 Nov 2017 10:35:06 +0100 Subject: [PATCH 2/5] Add --no-abao, --no-report and review fixes --- TESTING.md | 7 ++++-- test/bin/run-tests-docker.sh | 6 +++--- test/bin/run-tests-ubuntu.sh | 41 ++++++++++++++++++++++++++---------- 3 files changed, 38 insertions(+), 16 deletions(-) diff --git a/TESTING.md b/TESTING.md index 0e8eedc9a..4e2684afa 100644 --- a/TESTING.md +++ b/TESTING.md @@ -11,8 +11,11 @@ Run automated tests: * To skip linting, use `--no-lint` (`-L`) * To skip unit tests, use `--no-unit` (`-U`) * To skip integration tests, use `--no-integ` (`-I`) +* To skip abao tests, use `--no-abao` (`-A`) * To pass any arguments to `py.test`, use `-- PYTEST_ARGS` +See [py.test usage](https://docs.pytest.org/en/latest/usage.html) for more. 
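For example (the `-k` expression is illustrative), running only the unit tests whose names match `upload`, with extra verbosity, would look like:
```
./test/bin/run-tests-ubuntu.sh -L -I -A -- -k upload -vv
```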
+ ### Docker Build scitran-core image and run automated tests in a docker container: ``` @@ -22,9 +25,9 @@ Build scitran-core image and run automated tests in a docker container: * To pass any arguments to `run-tests-ubuntu.sh`, use `-- TEST_ARGS` #### Example -Without rebuilding the image, run only unit tests matching `foo`, use the highest verbosity level for test output and jump into a python debugger session in case an assertion fails: +Without rebuilding the image, run only integration tests matching `foo`, use the highest verbosity level for test output and jump into a python debugger session in case an assertion fails: ``` -./test/bin/run-tests-docker.sh -B -- -L -I -- -k foo -vvv --pdb +./test/bin/run-tests-docker.sh -B -- -L -U -A -- -k foo -vvv --pdb ``` ### Tools diff --git a/test/bin/run-tests-docker.sh b/test/bin/run-tests-docker.sh index dd7349be2..0fb0f8135 100755 --- a/test/bin/run-tests-docker.sh +++ b/test/bin/run-tests-docker.sh @@ -34,6 +34,8 @@ function main() { shift done + trap clean_up EXIT + if ${DOCKER_BUILD}; then echo "Building scitran-core:run-tests ..." docker build -t scitran-core:run-tests . @@ -62,7 +64,7 @@ function main() { function clean_up() { - export TEST_RESULT_CODE=$? + local TEST_RESULT_CODE=$? set +e # Copy coverage file to host for possible further reporting @@ -75,7 +77,5 @@ function clean_up() { exit $TEST_RESULT_CODE } -trap clean_up EXIT - main "$@" diff --git a/test/bin/run-tests-ubuntu.sh b/test/bin/run-tests-ubuntu.sh index f0e7b7989..ce5952748 100755 --- a/test/bin/run-tests-ubuntu.sh +++ b/test/bin/run-tests-ubuntu.sh @@ -15,6 +15,8 @@ Options: -L, --no-lint Skip linting -U, --no-unit Skip unit tests -I, --no-integ Skip integration tests + -A, --no-abao Skip abao tests + -R, --no-report Skip coverage report -h, --help Print this help and exit -- PYTEST_ARGS Arguments passed to py.test @@ -34,13 +36,18 @@ function main() { local RUN_LINT=true local RUN_UNIT=true local RUN_INTEG=true + local RUN_ABAO=true local PYTEST_ARGS= + export RUN_REPORT=true + while [[ "$#" > 0 ]]; do case "$1" in - -L|--no-lint) RUN_LINT=false ;; - -U|--no-unit) RUN_UNIT=false ;; - -I|--no-integ) RUN_INTEG=false ;; + -L|--no-lint) RUN_LINT=false ;; + -U|--no-unit) RUN_UNIT=false ;; + -I|--no-integ) RUN_INTEG=false ;; + -A|--no-abao) RUN_ABAO=false ;; + -R|--no-report) RUN_REPORT=false ;; -h|--help) usage; exit 0 ;; --) PYTEST_ARGS="${@:2}"; break ;; *) echo "Invalid argument: $1" >&2; usage; exit 1 ;; @@ -48,6 +55,13 @@ function main() { shift done + if ! (${RUN_LINT} && ${RUN_UNIT} && ${RUN_INTEG} && ${RUN_ABAO}); then + # Skip coverage report if any tests are skipped + RUN_REPORT=false + fi + + trap clean_up EXIT + # Remove __pycache__ directories for issue with __file__ attribute due to # running the tests on the host creating bytecode files hich have a # mismatched __file__ attribute when loaded in docker container @@ -79,8 +93,8 @@ function main() { py.test --cov=api --cov-report= test/unit_tests/python $PYTEST_ARGS fi - if ${RUN_INTEG}; then - echo "Running integration tests ..." + if ${RUN_INTEG} || ${RUN_ABAO}; then + echo "Spinning up dependencies ..." uwsgi --http "localhost:8081" --master --http-keepalive \ --so-keepalive --add-header "Connection: Keep-Alive" \ --processes 1 --threads 1 \ @@ -102,10 +116,15 @@ function main() { printf '.' sleep 1 done + fi + if ${RUN_INTEG}; then + echo "Running integration tests ..." py.test test/integration_tests/python $PYTEST_ARGS + fi - + if ${RUN_ABAO}; then + echo "Running abao tests ..." 
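Guards such as `if ${RUN_ABAO}; then` work because each RUN_* flag holds the literal string `true` or `false`, which the shell then runs as the builtin of the same name; a tiny demonstration:
```
RUN_ABAO=false
if ${RUN_ABAO}; then echo "abao selected"; else echo "abao skipped"; fi
```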
# Create resources that Abao relies on python test/integration_tests/abao/load_fixture.py @@ -133,7 +152,7 @@ function main() { function clean_up () { - export TEST_RESULT_CODE=$? + local TEST_RESULT_CODE=$? set +e echo @@ -141,11 +160,11 @@ function clean_up () { if [[ -n "${API_PID:-}" ]]; then # Killing uwsgi - kill $API_PID || true + kill $API_PID wait 2> /dev/null fi - if [[ "${TEST_RESULT_CODE}" == "0" ]]; then + if ${RUN_REPORT} && [[ "${TEST_RESULT_CODE}" == "0" ]]; then echo echo "UNIT TEST COVERAGE:" coverage report --skip-covered @@ -154,12 +173,12 @@ function clean_up () { coverage combine coverage report --show-missing coverage html + else + echo "Some tests were skipped or failed, skipping coverage report" fi exit $TEST_RESULT_CODE } -trap clean_up EXIT - main "$@" From 84d26c026dc580a6fb2cd914f94aecfb42aafa0f Mon Sep 17 00:00:00 2001 From: Ambrus Simon Date: Wed, 8 Nov 2017 10:57:33 +0100 Subject: [PATCH 3/5] Move test/ to tests/ --- {test => tests}/bin/run-tests-docker.sh | 0 {test => tests}/bin/run-tests-ubuntu.sh | 0 .../bin/setup-integration-tests-ubuntu.sh | 0 {test => tests}/integration_tests/.npmignore | 0 .../integration_tests/abao/abao_test_hooks.js | 0 .../integration_tests/abao/load_fixture.py | 0 .../abao/test_files/engine-analyses-1.txt | 0 .../integration_tests/abao/test_files/notes.txt | 0 .../integration_tests/abao/test_files/test-1.dcm | Bin {test => tests}/integration_tests/package.json | 0 .../integration_tests/python/conftest.py | 0 .../integration_tests/python/test_access_log.py | 0 .../integration_tests/python/test_batch.py | 0 .../integration_tests/python/test_collection.py | 0 .../integration_tests/python/test_containers.py | 0 .../integration_tests/python/test_download.py | 0 .../integration_tests/python/test_errors.py | 0 .../integration_tests/python/test_gears.py | 0 .../integration_tests/python/test_groups.py | 0 .../integration_tests/python/test_handlers.py | 0 .../integration_tests/python/test_jobs.py | 0 .../integration_tests/python/test_notes.py | 0 .../integration_tests/python/test_permissions.py | 0 .../integration_tests/python/test_propagation.py | 0 .../integration_tests/python/test_reports.py | 0 .../integration_tests/python/test_resolver.py | 0 .../integration_tests/python/test_rules.py | 0 .../integration_tests/python/test_tags.py | 0 .../integration_tests/python/test_uploads.py | 0 .../integration_tests/python/test_users.py | 0 .../requirements-integration-test.txt | 0 {test => tests}/unit_tests/python/conftest.py | 0 {test => tests}/unit_tests/python/test_auth.py | 0 {test => tests}/unit_tests/python/test_config.py | 0 .../unit_tests/python/test_dataexplorer.py | 0 .../unit_tests/python/test_db_upgrade.py | 0 {test => tests}/unit_tests/python/test_files.py | 0 {test => tests}/unit_tests/python/test_gear_util.py | 0 {test => tests}/unit_tests/python/test_key.py | 0 {test => tests}/unit_tests/python/test_request.py | 0 {test => tests}/unit_tests/python/test_rules.py | 0 {test => tests}/unit_tests/python/test_util.py | 0 .../unit_tests/python/test_validators.py | 0 {test => tests}/unit_tests/python/test_web_start.py | 0 44 files changed, 0 insertions(+), 0 deletions(-) rename {test => tests}/bin/run-tests-docker.sh (100%) rename {test => tests}/bin/run-tests-ubuntu.sh (100%) rename {test => tests}/bin/setup-integration-tests-ubuntu.sh (100%) rename {test => tests}/integration_tests/.npmignore (100%) rename {test => tests}/integration_tests/abao/abao_test_hooks.js (100%) rename {test => tests}/integration_tests/abao/load_fixture.py (100%) 
rename {test => tests}/integration_tests/abao/test_files/engine-analyses-1.txt (100%) rename {test => tests}/integration_tests/abao/test_files/notes.txt (100%) rename {test => tests}/integration_tests/abao/test_files/test-1.dcm (100%) rename {test => tests}/integration_tests/package.json (100%) rename {test => tests}/integration_tests/python/conftest.py (100%) rename {test => tests}/integration_tests/python/test_access_log.py (100%) rename {test => tests}/integration_tests/python/test_batch.py (100%) rename {test => tests}/integration_tests/python/test_collection.py (100%) rename {test => tests}/integration_tests/python/test_containers.py (100%) rename {test => tests}/integration_tests/python/test_download.py (100%) rename {test => tests}/integration_tests/python/test_errors.py (100%) rename {test => tests}/integration_tests/python/test_gears.py (100%) rename {test => tests}/integration_tests/python/test_groups.py (100%) rename {test => tests}/integration_tests/python/test_handlers.py (100%) rename {test => tests}/integration_tests/python/test_jobs.py (100%) rename {test => tests}/integration_tests/python/test_notes.py (100%) rename {test => tests}/integration_tests/python/test_permissions.py (100%) rename {test => tests}/integration_tests/python/test_propagation.py (100%) rename {test => tests}/integration_tests/python/test_reports.py (100%) rename {test => tests}/integration_tests/python/test_resolver.py (100%) rename {test => tests}/integration_tests/python/test_rules.py (100%) rename {test => tests}/integration_tests/python/test_tags.py (100%) rename {test => tests}/integration_tests/python/test_uploads.py (100%) rename {test => tests}/integration_tests/python/test_users.py (100%) rename {test => tests}/integration_tests/requirements-integration-test.txt (100%) rename {test => tests}/unit_tests/python/conftest.py (100%) rename {test => tests}/unit_tests/python/test_auth.py (100%) rename {test => tests}/unit_tests/python/test_config.py (100%) rename {test => tests}/unit_tests/python/test_dataexplorer.py (100%) rename {test => tests}/unit_tests/python/test_db_upgrade.py (100%) rename {test => tests}/unit_tests/python/test_files.py (100%) rename {test => tests}/unit_tests/python/test_gear_util.py (100%) rename {test => tests}/unit_tests/python/test_key.py (100%) rename {test => tests}/unit_tests/python/test_request.py (100%) rename {test => tests}/unit_tests/python/test_rules.py (100%) rename {test => tests}/unit_tests/python/test_util.py (100%) rename {test => tests}/unit_tests/python/test_validators.py (100%) rename {test => tests}/unit_tests/python/test_web_start.py (100%) diff --git a/test/bin/run-tests-docker.sh b/tests/bin/run-tests-docker.sh similarity index 100% rename from test/bin/run-tests-docker.sh rename to tests/bin/run-tests-docker.sh diff --git a/test/bin/run-tests-ubuntu.sh b/tests/bin/run-tests-ubuntu.sh similarity index 100% rename from test/bin/run-tests-ubuntu.sh rename to tests/bin/run-tests-ubuntu.sh diff --git a/test/bin/setup-integration-tests-ubuntu.sh b/tests/bin/setup-integration-tests-ubuntu.sh similarity index 100% rename from test/bin/setup-integration-tests-ubuntu.sh rename to tests/bin/setup-integration-tests-ubuntu.sh diff --git a/test/integration_tests/.npmignore b/tests/integration_tests/.npmignore similarity index 100% rename from test/integration_tests/.npmignore rename to tests/integration_tests/.npmignore diff --git a/test/integration_tests/abao/abao_test_hooks.js b/tests/integration_tests/abao/abao_test_hooks.js similarity index 100% rename from 
test/integration_tests/abao/abao_test_hooks.js rename to tests/integration_tests/abao/abao_test_hooks.js diff --git a/test/integration_tests/abao/load_fixture.py b/tests/integration_tests/abao/load_fixture.py similarity index 100% rename from test/integration_tests/abao/load_fixture.py rename to tests/integration_tests/abao/load_fixture.py diff --git a/test/integration_tests/abao/test_files/engine-analyses-1.txt b/tests/integration_tests/abao/test_files/engine-analyses-1.txt similarity index 100% rename from test/integration_tests/abao/test_files/engine-analyses-1.txt rename to tests/integration_tests/abao/test_files/engine-analyses-1.txt diff --git a/test/integration_tests/abao/test_files/notes.txt b/tests/integration_tests/abao/test_files/notes.txt similarity index 100% rename from test/integration_tests/abao/test_files/notes.txt rename to tests/integration_tests/abao/test_files/notes.txt diff --git a/test/integration_tests/abao/test_files/test-1.dcm b/tests/integration_tests/abao/test_files/test-1.dcm similarity index 100% rename from test/integration_tests/abao/test_files/test-1.dcm rename to tests/integration_tests/abao/test_files/test-1.dcm diff --git a/test/integration_tests/package.json b/tests/integration_tests/package.json similarity index 100% rename from test/integration_tests/package.json rename to tests/integration_tests/package.json diff --git a/test/integration_tests/python/conftest.py b/tests/integration_tests/python/conftest.py similarity index 100% rename from test/integration_tests/python/conftest.py rename to tests/integration_tests/python/conftest.py diff --git a/test/integration_tests/python/test_access_log.py b/tests/integration_tests/python/test_access_log.py similarity index 100% rename from test/integration_tests/python/test_access_log.py rename to tests/integration_tests/python/test_access_log.py diff --git a/test/integration_tests/python/test_batch.py b/tests/integration_tests/python/test_batch.py similarity index 100% rename from test/integration_tests/python/test_batch.py rename to tests/integration_tests/python/test_batch.py diff --git a/test/integration_tests/python/test_collection.py b/tests/integration_tests/python/test_collection.py similarity index 100% rename from test/integration_tests/python/test_collection.py rename to tests/integration_tests/python/test_collection.py diff --git a/test/integration_tests/python/test_containers.py b/tests/integration_tests/python/test_containers.py similarity index 100% rename from test/integration_tests/python/test_containers.py rename to tests/integration_tests/python/test_containers.py diff --git a/test/integration_tests/python/test_download.py b/tests/integration_tests/python/test_download.py similarity index 100% rename from test/integration_tests/python/test_download.py rename to tests/integration_tests/python/test_download.py diff --git a/test/integration_tests/python/test_errors.py b/tests/integration_tests/python/test_errors.py similarity index 100% rename from test/integration_tests/python/test_errors.py rename to tests/integration_tests/python/test_errors.py diff --git a/test/integration_tests/python/test_gears.py b/tests/integration_tests/python/test_gears.py similarity index 100% rename from test/integration_tests/python/test_gears.py rename to tests/integration_tests/python/test_gears.py diff --git a/test/integration_tests/python/test_groups.py b/tests/integration_tests/python/test_groups.py similarity index 100% rename from test/integration_tests/python/test_groups.py rename to 
tests/integration_tests/python/test_groups.py diff --git a/test/integration_tests/python/test_handlers.py b/tests/integration_tests/python/test_handlers.py similarity index 100% rename from test/integration_tests/python/test_handlers.py rename to tests/integration_tests/python/test_handlers.py diff --git a/test/integration_tests/python/test_jobs.py b/tests/integration_tests/python/test_jobs.py similarity index 100% rename from test/integration_tests/python/test_jobs.py rename to tests/integration_tests/python/test_jobs.py diff --git a/test/integration_tests/python/test_notes.py b/tests/integration_tests/python/test_notes.py similarity index 100% rename from test/integration_tests/python/test_notes.py rename to tests/integration_tests/python/test_notes.py diff --git a/test/integration_tests/python/test_permissions.py b/tests/integration_tests/python/test_permissions.py similarity index 100% rename from test/integration_tests/python/test_permissions.py rename to tests/integration_tests/python/test_permissions.py diff --git a/test/integration_tests/python/test_propagation.py b/tests/integration_tests/python/test_propagation.py similarity index 100% rename from test/integration_tests/python/test_propagation.py rename to tests/integration_tests/python/test_propagation.py diff --git a/test/integration_tests/python/test_reports.py b/tests/integration_tests/python/test_reports.py similarity index 100% rename from test/integration_tests/python/test_reports.py rename to tests/integration_tests/python/test_reports.py diff --git a/test/integration_tests/python/test_resolver.py b/tests/integration_tests/python/test_resolver.py similarity index 100% rename from test/integration_tests/python/test_resolver.py rename to tests/integration_tests/python/test_resolver.py diff --git a/test/integration_tests/python/test_rules.py b/tests/integration_tests/python/test_rules.py similarity index 100% rename from test/integration_tests/python/test_rules.py rename to tests/integration_tests/python/test_rules.py diff --git a/test/integration_tests/python/test_tags.py b/tests/integration_tests/python/test_tags.py similarity index 100% rename from test/integration_tests/python/test_tags.py rename to tests/integration_tests/python/test_tags.py diff --git a/test/integration_tests/python/test_uploads.py b/tests/integration_tests/python/test_uploads.py similarity index 100% rename from test/integration_tests/python/test_uploads.py rename to tests/integration_tests/python/test_uploads.py diff --git a/test/integration_tests/python/test_users.py b/tests/integration_tests/python/test_users.py similarity index 100% rename from test/integration_tests/python/test_users.py rename to tests/integration_tests/python/test_users.py diff --git a/test/integration_tests/requirements-integration-test.txt b/tests/integration_tests/requirements-integration-test.txt similarity index 100% rename from test/integration_tests/requirements-integration-test.txt rename to tests/integration_tests/requirements-integration-test.txt diff --git a/test/unit_tests/python/conftest.py b/tests/unit_tests/python/conftest.py similarity index 100% rename from test/unit_tests/python/conftest.py rename to tests/unit_tests/python/conftest.py diff --git a/test/unit_tests/python/test_auth.py b/tests/unit_tests/python/test_auth.py similarity index 100% rename from test/unit_tests/python/test_auth.py rename to tests/unit_tests/python/test_auth.py diff --git a/test/unit_tests/python/test_config.py b/tests/unit_tests/python/test_config.py similarity index 100% rename from 
test/unit_tests/python/test_config.py rename to tests/unit_tests/python/test_config.py diff --git a/test/unit_tests/python/test_dataexplorer.py b/tests/unit_tests/python/test_dataexplorer.py similarity index 100% rename from test/unit_tests/python/test_dataexplorer.py rename to tests/unit_tests/python/test_dataexplorer.py diff --git a/test/unit_tests/python/test_db_upgrade.py b/tests/unit_tests/python/test_db_upgrade.py similarity index 100% rename from test/unit_tests/python/test_db_upgrade.py rename to tests/unit_tests/python/test_db_upgrade.py diff --git a/test/unit_tests/python/test_files.py b/tests/unit_tests/python/test_files.py similarity index 100% rename from test/unit_tests/python/test_files.py rename to tests/unit_tests/python/test_files.py diff --git a/test/unit_tests/python/test_gear_util.py b/tests/unit_tests/python/test_gear_util.py similarity index 100% rename from test/unit_tests/python/test_gear_util.py rename to tests/unit_tests/python/test_gear_util.py diff --git a/test/unit_tests/python/test_key.py b/tests/unit_tests/python/test_key.py similarity index 100% rename from test/unit_tests/python/test_key.py rename to tests/unit_tests/python/test_key.py diff --git a/test/unit_tests/python/test_request.py b/tests/unit_tests/python/test_request.py similarity index 100% rename from test/unit_tests/python/test_request.py rename to tests/unit_tests/python/test_request.py diff --git a/test/unit_tests/python/test_rules.py b/tests/unit_tests/python/test_rules.py similarity index 100% rename from test/unit_tests/python/test_rules.py rename to tests/unit_tests/python/test_rules.py diff --git a/test/unit_tests/python/test_util.py b/tests/unit_tests/python/test_util.py similarity index 100% rename from test/unit_tests/python/test_util.py rename to tests/unit_tests/python/test_util.py diff --git a/test/unit_tests/python/test_validators.py b/tests/unit_tests/python/test_validators.py similarity index 100% rename from test/unit_tests/python/test_validators.py rename to tests/unit_tests/python/test_validators.py diff --git a/test/unit_tests/python/test_web_start.py b/tests/unit_tests/python/test_web_start.py similarity index 100% rename from test/unit_tests/python/test_web_start.py rename to tests/unit_tests/python/test_web_start.py From 63a27be84e3ea4d8530085de8cca2512549a50fe Mon Sep 17 00:00:00 2001 From: Ambrus Simon Date: Wed, 8 Nov 2017 11:12:56 +0100 Subject: [PATCH 4/5] Rename test/ references to tests/ --- .travis.yml | 4 ++-- Dockerfile | 4 ++-- TESTING.md | 6 +++--- tests/bin/run-tests-docker.sh | 4 ++-- tests/bin/run-tests-ubuntu.sh | 16 +++++++------- tests/bin/setup-integration-tests-ubuntu.sh | 8 +++---- tests/integration_tests/abao/load_fixture.py | 22 ++++++++++---------- 7 files changed, 32 insertions(+), 32 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4e1c6b03a..b80b05ec1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,10 +9,10 @@ env: install: - bin/install-ubuntu.sh - - test/bin/setup-integration-tests-ubuntu.sh + - tests/bin/setup-integration-tests-ubuntu.sh script: - - SCITRAN_PERSISTENT_DB_PORT=27017 test/bin/run-tests-ubuntu.sh + - SCITRAN_PERSISTENT_DB_PORT=27017 tests/bin/run-tests-ubuntu.sh after_success: - if [ "$TRAVIS_BRANCH" == "master" -o "$TRAVIS_EVENT_TYPE" == "pull_request" ]; then diff --git a/Dockerfile b/Dockerfile index b85b43277..88f6fc1d5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -77,8 +77,8 @@ RUN pip install --upgrade pip wheel setuptools \ && pip install -r /var/scitran/code/api/requirements-docker.txt \ && pip install -r 
/var/scitran/code/api/requirements.txt -COPY test /var/scitran/code/api/test/ -RUN bash -e -x /var/scitran/code/api/test/bin/setup-integration-tests-ubuntu.sh +COPY tests /var/scitran/code/api/tests/ +RUN bash -e -x /var/scitran/code/api/tests/bin/setup-integration-tests-ubuntu.sh # Copy full repo diff --git a/TESTING.md b/TESTING.md index 4e2684afa..c364360a1 100644 --- a/TESTING.md +++ b/TESTING.md @@ -19,7 +19,7 @@ See [py.test usage](https://docs.pytest.org/en/latest/usage.html) for more. ### Docker Build scitran-core image and run automated tests in a docker container: ``` -./test/bin/run-tests-docker.sh +./tests/bin/run-tests-docker.sh ``` * To skip building the image, use `--no-build` (`-B`) * To pass any arguments to `run-tests-ubuntu.sh`, use `-- TEST_ARGS` @@ -27,7 +27,7 @@ Build scitran-core image and run automated tests in a docker container: #### Example Without rebuilding the image, run only integration tests matching `foo`, use the highest verbosity level for test output and jump into a python debugger session in case an assertion fails: ``` -./test/bin/run-tests-docker.sh -B -- -L -U -A -- -k foo -vvv --pdb +./tests/bin/run-tests-docker.sh -B -- -L -U -A -- -k foo -vvv --pdb ``` ### Tools @@ -36,4 +36,4 @@ Without rebuilding the image, run only integration tests matching `foo`, use the ### Testing API against RAML with Abao Abao is one of the testing tools run during our TravisCI build. It tests the API implementation against what’s defined in the RAML spec. Adding a new resource / url to the RAML spec will cause Abao to verify that resource during integration tests. Sometimes abao cannot properly test a resource (file field uploads) or a test may require chaining variable. Abao has before and after hooks for tests, written in javascript. These can be used to skip a test, inject variables into the request, or make extra assertions about the response. See tests/integration/abao in the repo for the hooks file. See [abao github readme](https://github.com/cybertk/abao/blob/master/README.md) for more information on how to use hooks. -Abao tests can depend on specific resources (eg. group, project, session, etc.) pre-existing in the DB. That resource loading should be maintained within `test/integration_tests/abao/load_fixture.py` and is executed automatically via the integration test scripts at `test/bin`. +Abao tests can depend on specific resources (eg. group, project, session, etc.) pre-existing in the DB. That resource loading should be maintained within `tests/integration_tests/abao/load_fixture.py` and is executed automatically via the integration test scripts at `test/bin`. 
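For reference, the scripts drive Abao roughly as below, changing into the schema definitions directory first so relative `$ref`s resolve; `$integration_test_node_modules` and `$SCITRAN_SITE_API_URL` are set earlier in `run-tests-ubuntu.sh` (paths as of this patch):
```
pushd raml/schemas/definitions
NODE_PATH="$integration_test_node_modules" abao ../../api.raml \
    "--server=$SCITRAN_SITE_API_URL" \
    "--hookfiles=../../../tests/integration_tests/abao/abao_test_hooks.js"
popd
```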
diff --git a/tests/bin/run-tests-docker.sh b/tests/bin/run-tests-docker.sh index 0fb0f8135..f27b034f6 100755 --- a/tests/bin/run-tests-docker.sh +++ b/tests/bin/run-tests-docker.sh @@ -14,7 +14,7 @@ Usage: Options: -B, --no-build Skip docker build -h, --help Print this help and exit - -- TEST_ARGS Arguments passed to test/bin/run-tests-ubuntu.sh + -- TEST_ARGS Arguments passed to tests/bin/run-tests-ubuntu.sh EOF } @@ -58,7 +58,7 @@ function main() { -v $(pwd):/var/scitran/code/api \ --entrypoint bash \ scitran-core:run-tests \ - /var/scitran/code/api/test/bin/run-tests-ubuntu.sh \ + /var/scitran/code/api/tests/bin/run-tests-ubuntu.sh \ $TEST_ARGS } diff --git a/tests/bin/run-tests-ubuntu.sh b/tests/bin/run-tests-ubuntu.sh index ce5952748..6f870493f 100755 --- a/tests/bin/run-tests-ubuntu.sh +++ b/tests/bin/run-tests-ubuntu.sh @@ -65,8 +65,8 @@ function main() { # Remove __pycache__ directories for issue with __file__ attribute due to # running the tests on the host creating bytecode files hich have a # mismatched __file__ attribute when loaded in docker container - rm -rf test/unit_tests/python/__pycache__ - rm -rf test/integration_tests/python/__pycache__ + rm -rf tests/unit_tests/python/__pycache__ + rm -rf tests/integration_tests/python/__pycache__ export PYTHONPATH="$(pwd)" export SCITRAN_SITE_API_URL="http://localhost:8081/api" @@ -90,7 +90,7 @@ function main() { if ${RUN_UNIT}; then echo "Running unit tests ..." rm -f .coverage - py.test --cov=api --cov-report= test/unit_tests/python $PYTEST_ARGS + py.test --cov=api --cov-report= tests/unit_tests/python $PYTEST_ARGS fi if ${RUN_INTEG} || ${RUN_ABAO}; then @@ -120,20 +120,20 @@ function main() { if ${RUN_INTEG}; then echo "Running integration tests ..." - py.test test/integration_tests/python $PYTEST_ARGS + py.test tests/integration_tests/python $PYTEST_ARGS fi if ${RUN_ABAO}; then echo "Running abao tests ..." # Create resources that Abao relies on - python test/integration_tests/abao/load_fixture.py + python tests/integration_tests/abao/load_fixture.py # If no VIRTUAL_ENV, make sure /usr/local/bin is in the path if [[ -z "${VIRTUAL_ENV:-}" ]]; then PATH="/usr/local/bin:$PATH" - npm install test/integration_tests + npm install tests/integration_tests else - npm install --global test/integration_tests + npm install --global tests/integration_tests fi PATH="$(npm bin):$PATH" @@ -145,7 +145,7 @@ function main() { # Have to change into definitions directory to resolve # relative $ref's in the jsonschema's pushd raml/schemas/definitions - NODE_PATH="$integration_test_node_modules" abao ../../api.raml "--server=$SCITRAN_SITE_API_URL" "--hookfiles=../../../test/integration_tests/abao/abao_test_hooks.js" + NODE_PATH="$integration_test_node_modules" abao ../../api.raml "--server=$SCITRAN_SITE_API_URL" "--hookfiles=../../../tests/integration_tests/abao/abao_test_hooks.js" popd fi } diff --git a/tests/bin/setup-integration-tests-ubuntu.sh b/tests/bin/setup-integration-tests-ubuntu.sh index c384ac0ee..9a700f37d 100755 --- a/tests/bin/setup-integration-tests-ubuntu.sh +++ b/tests/bin/setup-integration-tests-ubuntu.sh @@ -1,13 +1,13 @@ -set -e - +#!/usr/bin/env bash +set -eu unset CDPATH cd "$( dirname "${BASH_SOURCE[0]}" )/../.." 
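The abao section of `run-tests-ubuntu.sh` shown above installs the integration-test npm package either into the working tree or globally, depending on whether a virtualenv is active; a commented sketch (the setup script below unpacks Node.js into the virtualenv when one exists, so the "global" prefix is the virtualenv itself):
```
if [[ -z "${VIRTUAL_ENV:-}" ]]; then
    PATH="/usr/local/bin:$PATH"                     # system-wide node lives in /usr/local
    npm install tests/integration_tests             # installs into ./node_modules
else
    npm install --global tests/integration_tests    # global prefix is the virtualenv
fi
PATH="$(npm bin):$PATH"                             # prefer locally installed binaries such as abao
```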
-sudo pip install -U -r "test/integration_tests/requirements-integration-test.txt" +sudo pip install -U -r "tests/integration_tests/requirements-integration-test.txt" NODE_URL="https://nodejs.org/dist/v6.4.0/node-v6.4.0-linux-x64.tar.gz" -if [ -z "$VIRTUAL_ENV" ]; then +if [[ -z "${VIRTUAL_ENV:-}" ]]; then curl $NODE_URL | sudo tar xz -C /usr/local --strip-components 1 else curl $NODE_URL | tar xz -C $VIRTUAL_ENV --strip-components 1 diff --git a/tests/integration_tests/abao/load_fixture.py b/tests/integration_tests/abao/load_fixture.py index 18a8a675a..32c036bdc 100644 --- a/tests/integration_tests/abao/load_fixture.py +++ b/tests/integration_tests/abao/load_fixture.py @@ -59,7 +59,7 @@ def main(): # upload file to test-project-1/test-session-1/test-acquisition-1 # depends on 'create test-group' r = as_root.post('/upload/label', files={ - 'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'group': { '_id': 'test-group' }, 'project': { @@ -157,7 +157,7 @@ def main(): # upload file to test-collection-1 # depends on 'create test-collection-1' r = as_root.post('/collections/' + test_collection['_id'] + '/files', files={ - 'file': ('notes.txt', open('test/integration_tests/abao/test_files/notes.txt', 'rb')) + 'file': ('notes.txt', open('tests/integration_tests/abao/test_files/notes.txt', 'rb')) }) assert r.ok @@ -170,14 +170,14 @@ def main(): # upload file to test-project-1 # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' r = as_root.post('/projects/' + test_project['_id'] + '/files', files={ - 'file': ('notes.txt', open('test/integration_tests/abao/test_files/notes.txt', 'rb')) + 'file': ('notes.txt', open('tests/integration_tests/abao/test_files/notes.txt', 'rb')) }) assert r.ok # upload file to test-session-1 # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' r = as_root.post('/sessions/' + test_session['_id'] + '/files', files={ - 'file': ('notes.txt', open('test/integration_tests/abao/test_files/notes.txt', 'rb')) + 'file': ('notes.txt', open('tests/integration_tests/abao/test_files/notes.txt', 'rb')) }) assert r.ok @@ -232,7 +232,7 @@ def main(): # create session 1 test-analysis (file upload) # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' r = as_root.post('/sessions/' + test_session['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'label': 'test analysis', 'inputs': [ { 'name': 'test-1.dcm' } ] @@ -249,7 +249,7 @@ def main(): # create acquisition 1 test-analysis (file upload) # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' r = as_root.post('/acquisitions/' + test_acquisition['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'label': 'test analysis', 'inputs': [ { 'name': 'test-1.dcm' } ] @@ -261,7 +261,7 @@ def main(): # create acquisition 1 test-analysis 2 (file upload) # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' r = as_root.post('/acquisitions/' + test_acquisition['_id'] + '/analyses', files={ - 
'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'label': 'test analysis', 'inputs': [ { 'name': 'test-1.dcm' } ] @@ -272,7 +272,7 @@ def main(): # create collection 1 test-analysis (file upload) # depends on 'create test-collection-1' r = as_root.post('/collections/' + test_collection['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'label': 'test analysis', 'inputs': [ { 'name': 'test-1.dcm' } ] @@ -284,7 +284,7 @@ def main(): # create collection 1 test-analysis 2 (file upload) # depends on 'create test-collection-1' r = as_root.post('/collections/' + test_collection['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'label': 'test analysis 2', 'inputs': [ { 'name': 'test-1.dcm' } ] @@ -295,7 +295,7 @@ def main(): # create project 1 test-analysis (file upload) # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' r = as_root.post('/projects/' + test_project['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'label': 'test analysis', 'inputs': [ { 'name': 'test-1.dcm' } ] @@ -307,7 +307,7 @@ def main(): # create project 1 test-analysis 2 (file upload) # depends on 'upload file to test-project-1/test-session-1/test-acquisition-1' r = as_root.post('/projects/' + test_project['_id'] + '/analyses', files={ - 'file': ('test-1.dcm', open('test/integration_tests/abao/test_files/test-1.dcm', 'rb')), + 'file': ('test-1.dcm', open('tests/integration_tests/abao/test_files/test-1.dcm', 'rb')), 'metadata': ('', json.dumps({ 'label': 'test analysis', 'inputs': [ { 'name': 'test-1.dcm' } ] From d4cb69af0c861e10fd63b1ef1d042742d638afbd Mon Sep 17 00:00:00 2001 From: Ambrus Simon Date: Wed, 8 Nov 2017 14:44:43 +0100 Subject: [PATCH 5/5] Switch from opt-out to opt-in flags --- CONTRIBUTING.md | 2 +- TESTING.md | 12 ++++---- tests/bin/run-tests-docker.sh | 4 +-- tests/bin/run-tests-ubuntu.sh | 55 ++++++++++++++++++++--------------- 4 files changed, 41 insertions(+), 32 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 555188911..28033cb4a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,7 +27,7 @@ Changes to `requirements.txt` should always be by pull request. - Add docstrings to all functions with a one-line description of its purpose. ### Format -Ensure that `./test/bin/lint.sh api` exits without errors. +Ensure that `./tests/bin/run-tests-docker.sh -- -l` exits without errors. ### Commit Messages 1. 
The subject line should be a phrase describing the commit and limited to 50 characters diff --git a/TESTING.md b/TESTING.md index c364360a1..26b292e5c 100644 --- a/TESTING.md +++ b/TESTING.md @@ -8,10 +8,12 @@ Run automated tests: ./test/bin/setup-integration-tests-ubuntu.sh ./test/bin/run-tests-ubuntu.sh ``` -* To skip linting, use `--no-lint` (`-L`) -* To skip unit tests, use `--no-unit` (`-U`) -* To skip integration tests, use `--no-integ` (`-I`) -* To skip abao tests, use `--no-abao` (`-A`) +All tests are executed by default. Subsets can be run using the filtering options: + +* To run linting, use `--lint` (`-l`) +* To run unit tests, use `--unit` (`-u`) +* To run integration tests, use `--integ` (`-i`) +* To run abao tests, use `--abao` (`-a`) * To pass any arguments to `py.test`, use `-- PYTEST_ARGS` See [py.test usage](https://docs.pytest.org/en/latest/usage.html) for more. @@ -27,7 +29,7 @@ Build scitran-core image and run automated tests in a docker container: #### Example Without rebuilding the image, run only integration tests matching `foo`, use the highest verbosity level for test output and jump into a python debugger session in case an assertion fails: ``` -./tests/bin/run-tests-docker.sh -B -- -L -U -A -- -k foo -vvv --pdb +./tests/bin/run-tests-docker.sh -B -- -i -- -k foo -vvv --pdb ``` ### Tools diff --git a/tests/bin/run-tests-docker.sh b/tests/bin/run-tests-docker.sh index f27b034f6..afcd42c22 100755 --- a/tests/bin/run-tests-docker.sh +++ b/tests/bin/run-tests-docker.sh @@ -34,13 +34,13 @@ function main() { shift done - trap clean_up EXIT - if ${DOCKER_BUILD}; then echo "Building scitran-core:run-tests ..." docker build -t scitran-core:run-tests . fi + trap clean_up EXIT + docker network create scitran-core-test-network # Launch Mongo instance diff --git a/tests/bin/run-tests-ubuntu.sh b/tests/bin/run-tests-ubuntu.sh index 6f870493f..8c7d94fa5 100755 --- a/tests/bin/run-tests-ubuntu.sh +++ b/tests/bin/run-tests-ubuntu.sh @@ -11,14 +11,17 @@ Run scitran-core tests Usage: $0 [OPTION...] +Runs linting and all tests if no options are provided. +Runs subset of tests when using the filtering options. +Displays coverage report if all tests ran and passed. 
+ Options: - -L, --no-lint Skip linting - -U, --no-unit Skip unit tests - -I, --no-integ Skip integration tests - -A, --no-abao Skip abao tests - -R, --no-report Skip coverage report - -h, --help Print this help and exit - -- PYTEST_ARGS Arguments passed to py.test + -l, --lint Run linting + -u, --unit Run unit tests + -i, --integ Run integration tests + -a, --abao Run abao tests + -h, --help Print this help and exit + -- PYTEST_ARGS Arguments passed to py.test Envvars: SCITRAN_PERSISTENT_DB_PORT (9001) @@ -33,31 +36,35 @@ EOF function main() { - local RUN_LINT=true - local RUN_UNIT=true - local RUN_INTEG=true - local RUN_ABAO=true + export RUN_ALL=true + local RUN_LINT=false + local RUN_UNIT=false + local RUN_INTEG=false + local RUN_ABAO=false local PYTEST_ARGS= - export RUN_REPORT=true - while [[ "$#" > 0 ]]; do case "$1" in - -L|--no-lint) RUN_LINT=false ;; - -U|--no-unit) RUN_UNIT=false ;; - -I|--no-integ) RUN_INTEG=false ;; - -A|--no-abao) RUN_ABAO=false ;; - -R|--no-report) RUN_REPORT=false ;; - -h|--help) usage; exit 0 ;; - --) PYTEST_ARGS="${@:2}"; break ;; + -l|--lint) RUN_ALL=false; RUN_LINT=true ;; + -u|--unit) RUN_ALL=false; RUN_UNIT=true ;; + -i|--integ) RUN_ALL=false; RUN_INTEG=true ;; + -a|--abao) RUN_ALL=false; RUN_ABAO=true ;; + -h|--help) usage; exit 0 ;; + --) PYTEST_ARGS="${@:2}"; break ;; *) echo "Invalid argument: $1" >&2; usage; exit 1 ;; esac shift done - if ! (${RUN_LINT} && ${RUN_UNIT} && ${RUN_INTEG} && ${RUN_ABAO}); then - # Skip coverage report if any tests are skipped - RUN_REPORT=false + if ${RUN_ALL}; then + # No filtering options used, run everything by default + RUN_LINT=true + RUN_UNIT=true + RUN_INTEG=true + RUN_ABAO=true + elif ${RUN_LINT} && ${RUN_UNIT} && ${RUN_INTEG} && ${RUN_ABAO}; then + # All filtering options were used, the same as none + RUN_ALL=true fi trap clean_up EXIT @@ -164,7 +171,7 @@ function clean_up () { wait 2> /dev/null fi - if ${RUN_REPORT} && [[ "${TEST_RESULT_CODE}" == "0" ]]; then + if ${RUN_ALL} && [[ "${TEST_RESULT_CODE}" == "0" ]]; then echo echo "UNIT TEST COVERAGE:" coverage report --skip-covered
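For context on the reporting step the series keeps reworking: the unit-test run writes `.coverage`, the uwsgi process started with `SCITRAN_RUNTIME_COVERAGE=true` writes further data files, and the cleanup handler folds them together. A minimal sequence, assuming coverage.py is set up to produce combinable data files:
```
coverage report --skip-covered    # unit-test coverage from .coverage
coverage combine                  # merge data files written by the API process
coverage report --show-missing    # overall coverage after combining
coverage html                     # write an HTML report (htmlcov/ by default)
```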