diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..63df785
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,18 @@
+.git/
+.github/
+.dockerignore
+Dockerfile
+
+*~
+*.DS_Store
+*.egg-info/
+__pycache__/
+
+.docker
+
+.idea/
+.vscode/
+
+examples/
+
+venv/
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..cb25c93
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,181 @@
+# Continuous integration testing for ChRIS Plugin.
+# https://github.com/FNNDSC/python-chrisapp-template/wiki/Continuous-Integration
+#
+# - on push and PR: run pytest
+# - on push to main: build and push container images as ":latest"
+# - on push to semver tag: build and push container image with tag and
+#   upload the plugin description to ChRIS (https://cube.chrisproject.org/)
+
+name: build
+
+on:
+  push:
+    branches: [ main ]
+    tags:
+      - "v?[0-9]+.[0-9]+.[0-9]+*"
+  pull_request:
+    branches: [ main ]
+
+jobs:
+  test:
+    name: Unit tests
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+      - uses: docker/setup-buildx-action@v3
+      - name: Build
+        uses: docker/build-push-action@v5
+        with:
+          build-args: extras_require=dev
+          context: .
+          load: true
+          push: false
+          tags: "localhost/local/app:dev"
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+      - name: Run pytest
+        run: |
+          docker run -v "$GITHUB_WORKSPACE:/app:ro" -w /app localhost/local/app:dev \
+            pytest -o cache_dir=/tmp/pytest
+
+  build:
+    name: Build
+    if: github.event_name == 'push' || github.event_name == 'release'
+    # needs: [ test ]  # uncomment to require passing tests
+    runs-on: ubuntu-22.04
+
+    steps:
+      - name: Decide image tags
+        id: info
+        shell: python
+        run: |
+          import os
+          import itertools
+
+          def join_tag(t):
+              registry, repo, tag = t
+              return f'{registry}/{repo}:{tag}'.lower()
+
+          registries = ['docker.io', 'ghcr.io']
+          repos = ['${{ github.repository }}']
+          if '${{ github.ref_type }}' == 'branch':
+              tags = ['latest']
+          elif '${{ github.ref_type }}' == 'tag':
+              tag = '${{ github.ref_name }}'
+              version = tag[1:] if tag.startswith('v') else tag
+              tags = ['latest', version]
+          else:
+              tags = []
+
+          if '${{ github.ref_type }}' == 'tag':
+              local_tag = join_tag(('ghcr.io', '${{ github.repository }}', version))
+          else:
+              local_tag = join_tag(('localhost', '${{ github.repository }}', 'latest'))
+
+          product = itertools.product(registries, repos, tags)
+          tags_csv = ','.join(map(join_tag, product))
+          outputs = {
+              'tags_csv' : tags_csv,
+              'push'     : 'true' if tags_csv else 'false',
+              'local_tag': local_tag
+          }
+          with open(os.environ['GITHUB_OUTPUT'], 'a') as out:
+              for k, v in outputs.items():
+                  out.write(f'{k}={v}\n')
+
+      - uses: actions/checkout@v4
+      # QEMU is used for non-x86_64 builds
+      - uses: docker/setup-qemu-action@v3
+      # buildx adds additional features to docker build
+      - uses: docker/setup-buildx-action@v3
+        with:
+          driver-opts: network=host
+
+      # Here, we want to do the docker build twice:
+      # The first build pushes to our local registry for testing.
+      # The second build pushes to Docker Hub and ghcr.io.
+      - name: Build (local only)
+        uses: docker/build-push-action@v5
+        id: docker_build
+        with:
+          context: .
+          file: ./Dockerfile
+          tags: ${{ steps.info.outputs.local_tag }}
+          load: true
+          cache-from: type=gha
+      # If you have directories called examples/incoming/ and examples/outgoing/, then
+      # run your ChRIS plugin with no parameters, and assert that it creates all the files
+      # which are expected. File contents are not compared.
+      - name: Run examples
+        id: run_examples
+        run: |
+          if ! [ -d 'examples/incoming/' ] || ! [ -d 'examples/outgoing/' ]; then
+            echo "No examples."
+            exit 0
+          fi
+
+          dock_image=${{ steps.info.outputs.local_tag }}
+          output_dir=$(mktemp -d)
+          cmd=$(docker image inspect -f '{{ (index .Config.Cmd 0) }}' $dock_image)
+          docker run --rm -u "$(id -u):$(id -g)" \
+            -v "$PWD/examples/incoming:/incoming:ro" \
+            -v "$output_dir:/outgoing:rw" \
+            $dock_image $cmd /incoming /outgoing
+
+          for expected_file in $(find examples/outgoing -type f); do
+            fname="${expected_file##*/}"
+            out_path="$output_dir/$fname"
+            printf "Checking output %s exists..." "$out_path"
+            if [ -f "$out_path" ]; then
+              echo "ok"
+            else
+              echo "not found"
+              exit 1
+            fi
+          done
+
+      - name: Login to DockerHub
+        if: (github.event_name == 'push' || github.event_name == 'release') && contains(steps.info.outputs.tags_csv, 'docker.io')
+        uses: docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+      - name: Login to GitHub Container Registry
+        if: (github.event_name == 'push' || github.event_name == 'release') && contains(steps.info.outputs.tags_csv, 'ghcr.io')
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and push
+        uses: docker/build-push-action@v5
+        if: (github.event_name == 'push' || github.event_name == 'release')
+        with:
+          context: .
+          file: ./Dockerfile
+          tags: ${{ steps.info.outputs.tags_csv }}
+          # if non-x86_64 architectures are supported, add them here
+          platforms: linux/amd64  #,linux/arm64,linux/ppc64le
+          push: ${{ steps.info.outputs.push }}
+          cache-to: type=gha,mode=max
+
+      - name: Upload ChRIS Plugin
+        id: upload
+        if: github.ref_type == 'tag'
+        uses: FNNDSC/upload-chris-plugin@v1
+        with:
+          dock_image: ${{ steps.info.outputs.local_tag }}
+          username: ${{ secrets.CHRISPROJECT_USERNAME }}
+          password: ${{ secrets.CHRISPROJECT_PASSWORD }}
+          chris_url: https://cube.chrisproject.org/api/v1/
+          compute_names: NERC
+
+      - name: Update DockerHub description
+        if: steps.upload.outcome == 'success'
+        uses: peter-evans/dockerhub-description@v3
+        continue-on-error: true  # it is not crucial that this works
+        with:
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_PASSWORD }}
+          short-description: ${{ steps.upload.outputs.title }}
+          readme-filepath: ./README.md
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..ffd9fdb
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,11 @@
+*~
+*.DS_Store
+*.egg-info/
+__pycache__/
+
+.docker
+
+.idea/
+.vscode/
+
+venv/
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..e87fdfa
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,22 @@
+# Python version can be changed, e.g.
+# FROM python:3.8
+# FROM ghcr.io/mamba-org/micromamba:1.5.1-focal-cuda-11.3.1
+FROM docker.io/python:3.12.1-slim-bookworm
+
+LABEL org.opencontainers.image.authors="FNNDSC <dev@babyMRI.org>" \
+      org.opencontainers.image.title="Spleen data downloader" \
+      org.opencontainers.image.description="A ChRIS DS plugin to download a set of spleen data"
+
+ARG SRCDIR=/usr/local/src/pl-spleendata
+WORKDIR ${SRCDIR}
+
+COPY requirements.txt .
+RUN --mount=type=cache,sharing=private,target=/root/.cache/pip pip install -r requirements.txt
+
+COPY . .
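+
+# The extras_require build-arg below selects an optional dependency group from
+# setup.py's extras_require (e.g. "dev" adds pytest for the unit tests run in
+# CI); the default group "none" is empty.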
+ARG extras_require=none
+RUN pip install ".[${extras_require}]" \
+    && cd / && rm -rf ${SRCDIR}
+
+WORKDIR /
+
+CMD ["spleendatads"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d3ffa30
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 FNNDSC / BCH
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..32ab12b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,128 @@
+# Spleen data downloader
+
+[![Version](https://img.shields.io/docker/v/fnndsc/pl-spleendatads?sort=semver)](https://hub.docker.com/r/fnndsc/pl-spleendatads)
+[![MIT License](https://img.shields.io/github/license/fnndsc/pl-spleendatads)](https://github.com/FNNDSC/pl-spleendatads/blob/main/LICENSE)
+[![ci](https://github.com/FNNDSC/pl-spleendatads/actions/workflows/ci.yml/badge.svg)](https://github.com/FNNDSC/pl-spleendatads/actions/workflows/ci.yml)
+
+`pl-spleendatads` is a [_ChRIS_](https://chrisproject.org/) _DS_ plugin that downloads an exemplar spleen dataset useful for training and inference experiments.
+
+## Abstract
+
+This is a simple _DS_ plugin that downloads 3D spleen NIfTI volumes suitable for training and inference, as used in the [MONAI spleen segmentation exemplar notebook](https://github.com/Project-MONAI/tutorials/blob/main/3d_segmentation/spleen_segmentation_3d.ipynb). _DS_ plugins are suitable as non-root nodes of _ChRIS_ compute trees, i.e. nodes that have a parent node. If you need a _root node_ spleen data origin, use the [companion _FS_ spleen data plugin](https://github.com/FNNDSC/pl-spleendata).
+
+By default, the download is pretty big -- about 1.2GB -- so make sure you have the time and space. It is possible to prune the data after it has been downloaded. For example, if you are only interested in _training_, the `--trainingOnly` flag will prune the 43MB of testing NIfTI volumes. Conversely, if you are only interested in _inference_, the `--testingOnly` flag will remove the roughly 1.2GB of training data, saving considerable space.
+
+You still need to download the whole set, however, before you can prune.
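+
+For example (an illustrative sketch that uses the same container invocation as the Examples section below), a training-only download could look like:
+
+```shell
+mkdir input output
+apptainer exec docker://fnndsc/pl-spleendatads:latest \
+    spleendatads --trainingOnly input/ output/
+```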
+
+## Installation
+
+`pl-spleendatads` is a _[ChRIS](https://chrisproject.org/) plugin_, meaning it can run either from within _ChRIS_ or from the command line.
+
+## Local Usage
+
+### On the metal
+
+If you have checked out the repo, you can simply run `spleendatads` using
+
+```shell
+python -m venv venv
+source venv/bin/activate
+pip install -U ./
+spleendatads input/ output/
+```
+
+### PyPI
+
+Alternatively, you can simply run
+
+```shell
+pip install spleendatads
+```
+
+to install it directly from PyPI.
+
+### apptainer
+
+The recommended way is to use [Apptainer](https://apptainer.org/) (a.k.a. Singularity) to run `pl-spleendatads` as a container:
+
+```shell
+apptainer exec docker://fnndsc/pl-spleendatads spleendatads [--args values...] input/ output/
+```
+
+To print its available options, run:
+
+```shell
+apptainer exec docker://fnndsc/pl-spleendatads spleendatads --help
+```
+
+## Examples
+
+`spleendatads`, being a ChRIS _DS_ plugin, requires two positional arguments: an input directory from the upstream parent node, and a directory that will contain the output data. Simply create an empty `input` and `output`:
+
+```shell
+mkdir input output
+apptainer exec docker://fnndsc/pl-spleendatads:latest spleendatads [--args] input/ output/
+```
+
+## Development
+
+Instructions for developers.
+
+### Building
+
+Build a local container image:
+
+```shell
+docker build -t localhost/fnndsc/pl-spleendatads .
+```
+
+### Running
+
+Mount the source code `spleendatads.py` into a container to try out changes without a rebuild:
+
+```shell
+docker run --rm -it --userns=host -u $(id -u):$(id -g) \
+    -v $PWD/spleendatads.py:/usr/local/lib/python3.12/site-packages/spleendatads.py:ro \
+    -v $PWD/in:/incoming:ro -v $PWD/out:/outgoing:rw -w /outgoing \
+    localhost/fnndsc/pl-spleendatads spleendatads /incoming /outgoing
+```
+
+### Testing
+
+Run unit tests using `pytest`. It's recommended to rebuild the image to ensure that sources are up-to-date. Use the option `--build-arg extras_require=dev` to install the extra dependencies needed for testing.
+
+```shell
+docker build -t localhost/fnndsc/pl-spleendatads:dev --build-arg extras_require=dev .
+docker run --rm -it localhost/fnndsc/pl-spleendatads:dev pytest
+```
+
+## Release
+
+Steps for release can be automated by [Github Actions](.github/workflows/ci.yml).
+This section describes how to do those steps manually.
+
+### Increase Version Number
+
+Increase the value of `__version__` in `spleendatads.py` and commit the change.
+
+### Push Container Image
+
+Build and push an image tagged by the version. For example, for version `1.2.3`:
+
+```
+docker build -t docker.io/fnndsc/pl-spleendatads:1.2.3 .
+docker push docker.io/fnndsc/pl-spleendatads:1.2.3
+```
+
+### Get JSON Representation
+
+Run [`chris_plugin_info`](https://github.com/FNNDSC/chris_plugin#usage)
+to produce a JSON description of this plugin, which can be uploaded to _ChRIS_.
+
+```shell
+docker run --rm docker.io/fnndsc/pl-spleendatads:1.2.3 chris_plugin_info -d docker.io/fnndsc/pl-spleendatads:1.2.3 > chris_plugin_info.json
+```
+
+Instructions on how to upload the plugin to _ChRIS_ can be found here:
+https://chrisproject.org/docs/tutorials/upload_plugin
+
+_-30-_
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..2707068
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,170 @@
+Spleen data downloader
+======================
+
+|Version| |MIT License| |ci|
+
+``pl-spleendata`` is a `ChRIS <https://chrisproject.org/>`__ *FS* plugin
+which downloads an exemplar spleen dataset useful for training and
+inference experiments.
+
+Abstract
+--------
+
+This is a simple *FS* plugin that downloads 3D spleen NIfTI volumes
+suitable for training and inference, as used in the `MONAI spleen
+segmentation exemplar notebook
+<https://github.com/Project-MONAI/tutorials/blob/main/3d_segmentation/spleen_segmentation_3d.ipynb>`__.
+
+By default, the download is pretty big – about 1.2GB – so make sure you
+have the time and space. It is possible to prune the data after it has
+been downloaded. For example, if you are only interested in *training*,
+the ``--trainingOnly`` flag will prune the 43MB of testing NIfTI
+volumes. Conversely, if you are only interested in *inference*, the
+``--testingOnly`` flag will remove the roughly 1.2GB of training data,
+saving considerable space.
+
+You still need to download the whole set, however, before you can prune.
+
+Installation
+------------
+
+``pl-spleendata`` is a `ChRIS <https://chrisproject.org/>`__ *plugin*,
+meaning it can run either from within *ChRIS* or from the command line.
+
+Local Usage
+-----------
+
+On the metal
+~~~~~~~~~~~~
+
+If you have checked out the repo, you can simply run ``spleendata``
+using
+
+.. code:: shell
+
+   source venv/bin/activate
+   pip install -U ./
+   spleendata output/
+
+PyPI
+~~~~
+
+Alternatively, you can simply run
+
+.. code:: shell
+
+   pip install spleendata
+
+to install it directly from PyPI.
+
+apptainer
+~~~~~~~~~
+
+The recommended way is to use `Apptainer <https://apptainer.org/>`__
+(a.k.a. Singularity) to run ``pl-spleendata`` as a container:
+
+.. code:: shell
+
+   apptainer exec docker://fnndsc/pl-spleendata spleendata [--args values...] output/
+
+To print its available options, run:
+
+.. code:: shell
+
+   apptainer exec docker://fnndsc/pl-spleendata spleendata --help
+
+Examples
+--------
+
+``spleendata``, being a ChRIS *FS* plugin, only requires one positional
+argument: a directory that will contain the output data. Simply create
+an empty ``output``.
+
+.. code:: shell
+
+   mkdir output
+   apptainer exec docker://fnndsc/pl-spleendata:latest spleendata [--args] output/
+
+Development
+-----------
+
+Instructions for developers.
+
+Building
+~~~~~~~~
+
+Build a local container image:
+
+.. code:: shell
+
+   docker build -t localhost/fnndsc/pl-spleendata .
+
+Running
+~~~~~~~
+
+Mount the source code ``spleendata.py`` into a container to try out
+changes without a rebuild:
+
+.. code:: shell
+
+   docker run --rm -it --userns=host -u $(id -u):$(id -g) \
+       -v $PWD/spleendata.py:/usr/local/lib/python3.11/site-packages/spleendata.py:ro \
+       -v $PWD/in:/incoming:ro -v $PWD/out:/outgoing:rw -w /outgoing \
+       localhost/fnndsc/pl-spleendata spleendata /incoming /outgoing
+
+Testing
+~~~~~~~
+
+Run unit tests using ``pytest``. It’s recommended to rebuild the image
+to ensure that sources are up-to-date. Use the option
+``--build-arg extras_require=dev`` to install the extra dependencies
+needed for testing.
+
+.. code:: shell
+
+   docker build -t localhost/fnndsc/pl-spleendata:dev --build-arg extras_require=dev .
+   docker run --rm -it localhost/fnndsc/pl-spleendata:dev pytest
+
+Release
+-------
+
+Steps for release can be automated by `Github
+Actions <.github/workflows/ci.yml>`__. This section describes how to do
+those steps manually.
+
+Increase Version Number
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Increase the version number in ``setup.py`` and commit this file.
+
+Push Container Image
+~~~~~~~~~~~~~~~~~~~~
+
+Build and push an image tagged by the version. For example, for version
+``1.2.3``:
+
+::
+
+   docker build -t docker.io/fnndsc/pl-spleendata:1.2.3 .
+   docker push docker.io/fnndsc/pl-spleendata:1.2.3
+
+Get JSON Representation
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Run
+```chris_plugin_info`` <https://github.com/FNNDSC/chris_plugin#usage>`__
+to produce a JSON description of this plugin, which can be uploaded to
+*ChRIS*.
+
+.. code:: shell
+
+   docker run --rm docker.io/fnndsc/pl-spleendata:1.2.3 chris_plugin_info -d docker.io/fnndsc/pl-spleendata:1.2.3 > chris_plugin_info.json
+
+Instructions on how to upload the plugin to *ChRIS* can be found here:
+https://chrisproject.org/docs/tutorials/upload_plugin
+
+.. |Version| image:: https://img.shields.io/docker/v/fnndsc/pl-spleendata?sort=semver
+   :target: https://hub.docker.com/r/fnndsc/pl-spleendata
+.. |MIT License| image:: https://img.shields.io/github/license/fnndsc/pl-spleendata
+   :target: https://github.com/FNNDSC/pl-spleendata/blob/main/LICENSE
+.. |ci| image:: https://github.com/FNNDSC/pl-spleendata/actions/workflows/ci.yml/badge.svg
+   :target: https://github.com/FNNDSC/pl-spleendata/actions/workflows/ci.yml
diff --git a/pypi.sh b/pypi.sh
new file mode 100755
index 0000000..a31885e
--- /dev/null
+++ b/pypi.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+
+G_SYNOPSIS="
+
+  NAME
+
+        pypi.sh
+
+  SYNOPSIS
+
+        pypi.sh <version>
+
+  ARGS
+
+        <version>
+        A version string to upload. Typically something like '0.20.22'.
+
+  DESCRIPTION
+
+        pypi.sh is a simple helper script to tag and upload a new version
+        of this plugin to PyPI.
+
+"
+
+if (( $# != 1 )) ; then
+    echo "$G_SYNOPSIS"
+    exit 1
+fi
+
+VER=$1
+DIR=$PWD
+pandoc --from=markdown --to=rst --output=README.rst README.md
+git commit -am "v${VER}"
+git push origin main
+git tag $VER
+git push origin --tags
+
+#rstcheck README.rst
+python3 setup.py sdist
+cd $DIR
+twine upload dist/spleendatads-${VER}.tar.gz
+
+
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..96d114d
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,4 @@
+chris_plugin==0.4.0
+monai-weekly
+tqdm
+twine
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..a8db19d
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,46 @@
+from setuptools import setup
+import re
+
+_version_re = re.compile(r"(?<=^__version__ = (\"|'))(.+)(?=\"|')")
+
+
+def get_version(rel_path: str) -> str:
+    """
+    Searches for the ``__version__ = `` line in a source code file.
+
+    https://packaging.python.org/en/latest/guides/single-sourcing-package-version/
+    """
+    with open(rel_path, "r") as f:
+        matches = map(_version_re.search, f)
+        filtered = filter(lambda m: m is not None, matches)
+        version = next(filtered, None)
+    if version is None:
+        raise RuntimeError(f"Could not find __version__ in {rel_path}")
+    return version.group(0)
+
+
+def readme():
+    with open("README.rst") as f:
+        return f.read()
+
+
+setup(
+    name="spleendatads",
+    version=get_version("spleendatads.py"),
+    description="A ChRIS DS plugin to download a set of spleen data",
+    long_description=readme(),
+    author="FNNDSC",
+    author_email="dev@babyMRI.org",
+    url="https://github.com/FNNDSC/pl-spleendatads",
+    py_modules=["spleendatads"],
+    install_requires=["chris_plugin", "monai-weekly", "tqdm"],
+    license="MIT",
+    entry_points={"console_scripts": ["spleendatads = spleendatads:main"]},
+    classifiers=[
+        "License :: OSI Approved :: MIT License",
+        "Topic :: Scientific/Engineering",
+        "Topic :: Scientific/Engineering :: Bio-Informatics",
+        "Topic :: Scientific/Engineering :: Medical Science Apps.",
+    ],
+    extras_require={"none": [], "dev": ["pytest~=7.1"]},
+)
diff --git a/spleendatads.py b/spleendatads.py
new file mode 100644
index 0000000..ce86747
--- /dev/null
+++ b/spleendatads.py
@@ -0,0 +1,113 @@
+#!/usr/bin/env python
+
+from pathlib import Path
+import shutil
+from argparse import ArgumentParser, Namespace, ArgumentDefaultsHelpFormatter
+
+from chris_plugin import chris_plugin
+
+from monai.apps.utils import download_and_extract
+
+__version__ = "1.0.0"
+
+DISPLAY_TITLE = r"""
+       _                  _                     _       _            _
+      | |                | |                   | |     | |          | |
+ _ __ | |______ ___ _ __ | | ___  ___ _ __   __| | __ _| |_ __ _  __| |___
+| '_ \| |______/ __| '_ \| |/ _ \/ _ \ '_ \ / _` |/ _` | __/ _` |/ _` / __|
+| |_) | |      \__ \ |_) | |  __/  __/ | | | (_| | (_| | || (_| | (_| \__ \
+| .__/|_|      |___/ .__/|_|\___|\___|_| |_|\__,_|\__,_|\__\__,_|\__,_|___/
+| |                | |
+|_|                |_|
+
+"""
+
+
+parser = ArgumentParser(
+    description="""
+    A ChRIS DS plugin that downloads a spleen data set for training
+    and inference. Based on a MONAI exemplar:
+
+    https://github.com/Project-MONAI/tutorials/blob/main/3d_segmentation/spleen_segmentation_3d.ipynb
+    """,
+    formatter_class=ArgumentDefaultsHelpFormatter,
+)
+parser.add_argument(
+    "--skipDownload",
+    default=False,
+    action="store_true",
+    help="""If specified, skip the download. Only really useful for debugging/testing.""",
+)
+parser.add_argument(
+    "--trainingOnly",
+    default=False,
+    action="store_true",
+    help="""If specified, only preserve the training data (saving about 462MB)""",
+)
+parser.add_argument(
+    "--testingOnly",
+    default=False,
+    action="store_true",
+    help="""If specified, only preserve the testing data (saving about 1.2GB)""",
+)
+parser.add_argument(
+    "-V", "--version", action="version", version=f"%(prog)s {__version__}"
+)
+
+
+def dir_findAndDelete(startdir: Path, target: str):
+    """Recursively search ``startdir`` and delete the first directory named ``target``."""
+    for item in startdir.iterdir():
+        if item.is_dir():
+            if item.name == target:
+                shutil.rmtree(str(item))
+                print(f"Deleted directory: {target}")
+                break
+            else:
+                dir_findAndDelete(item, target)
+
+
+# The main function of this *ChRIS* plugin is denoted by this ``@chris_plugin`` "decorator."
+# Some metadata about the plugin is specified here. There is more metadata specified in setup.py.
+#
+# documentation: https://fnndsc.github.io/chris_plugin/chris_plugin.html#chris_plugin
+@chris_plugin(
+    parser=parser,
+    title="Spleen data downloader",
+    category="",                 # ref. https://chrisstore.co/plugins
+    min_memory_limit="100Mi",    # supported units: Mi, Gi
+    min_cpu_limit="1000m",       # millicores, e.g. "1000m" = 1 CPU core
+    min_gpu_limit=0,             # set min_gpu_limit=1 to enable GPU
+)
+def main(options: Namespace, inputdir: Path, outputdir: Path):
+    """
+    *ChRIS* plugins usually have two positional arguments: an **input directory** containing
+    input files and an **output directory** where to write output files. Command-line arguments
+    are passed to this main method implicitly when ``main()`` is called below without parameters.
+
+    :param options: non-positional arguments parsed by the parser given to @chris_plugin
+    :param inputdir: directory containing (read-only) input files
+    :param outputdir: directory where to write output files
+    """
+
+    print(DISPLAY_TITLE)
+    resource: str = "https://msd-for-monai.s3-us-west-2.amazonaws.com/Task09_Spleen.tar"
+    md5: str = "410d4a301da4e5b2f6f86ec3ddba524e"
+
+    compressed_file: Path = outputdir / "Task09_Spleen.tar"
+    data_dir: Path = outputdir / "Task09_Spleen"
+    # Download only if the data is not already present and the user has not
+    # asked to skip the download.
+    if not data_dir.exists() and not options.skipDownload:
+        download_and_extract(resource, str(compressed_file), str(outputdir), md5)
+
+    if compressed_file.exists():
+        compressed_file.unlink()
+    if options.trainingOnly:
+        dir_findAndDelete(outputdir, "imagesTs")
+    if options.testingOnly:
+        dir_findAndDelete(outputdir, "imagesTr")
+        dir_findAndDelete(outputdir, "labelsTr")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tests/test_example.py b/tests/test_example.py
new file mode 100644
index 0000000..c915865
--- /dev/null
+++ b/tests/test_example.py
@@ -0,0 +1,21 @@
+from pathlib import Path
+
+from spleendatads import parser, main
+
+
+def test_main(tmp_path: Path):
+    # set up empty input/output directories, as ChRIS would provide them
+    inputdir = tmp_path / 'incoming'
+    outputdir = tmp_path / 'outgoing'
+    inputdir.mkdir()
+    outputdir.mkdir()
+
+    # simulate a run of the main function, skipping the large download
+    options = parser.parse_args(['--skipDownload'])
+    main(options, inputdir, outputdir)
+
+    # with the download skipped, no dataset should have been written
+    assert not (outputdir / 'Task09_Spleen').exists()
+    assert not (outputdir / 'Task09_Spleen.tar').exists()
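+
+
+def test_dir_findAndDelete(tmp_path: Path):
+    # Illustrative extra check (an addition, not part of the original test
+    # suite): dir_findAndDelete should remove a nested directory matching the
+    # target name while leaving sibling directories untouched.
+    from spleendatads import dir_findAndDelete
+
+    (tmp_path / 'Task09_Spleen' / 'imagesTs').mkdir(parents=True)
+    (tmp_path / 'Task09_Spleen' / 'imagesTr').mkdir()
+
+    dir_findAndDelete(tmp_path, 'imagesTs')
+
+    assert not (tmp_path / 'Task09_Spleen' / 'imagesTs').exists()
+    assert (tmp_path / 'Task09_Spleen' / 'imagesTr').exists()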