diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml
index d0097882..b4a091ea 100644
--- a/.github/workflows/unit_tests.yml
+++ b/.github/workflows/unit_tests.yml
@@ -9,14 +9,12 @@ on:
     paths:
       - 'narps_open/**'
      - 'setup.py'
-      - 'setup.cfg'
      - 'pytest.ini'
      - 'tests/conftest.py'
   pull_request:
     paths:
      - 'narps_open/**'
      - 'setup.py'
-      - 'setup.cfg'
      - 'pytest.ini'
      - 'tests/conftest.py'
 
diff --git a/CONTENT.md b/CONTENT.md
index 5fbeed5a..74481284 100644
--- a/CONTENT.md
+++ b/CONTENT.md
@@ -1,4 +1,4 @@
-### Contents overview
+# Contents overview
 
 - :snake: :package: `narps_open/` contains the Python package with all the pipelines logic.
 - :brain: `data/` contains data that is used by the pipelines, as well as the (intermediate or final) results data. Instructions to download data are available in [INSTALL.md](/INSTALL.md#data-download-instructions).
diff --git a/Dockerfile b/Dockerfile
index 13b22194..a2c4e8ba 100755
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,319 +1,8 @@
-# Generated by: Neurodocker version 0.7.0+0.gdc97516.dirty
-# Latest release: Neurodocker version 0.7.0
-# Timestamp: 2021/11/09 11:04:47 UTC
-#
-# Thank you for using Neurodocker. If you discover any issues
-# or ways to improve this software, please submit an issue or
-# pull request on our GitHub repository:
-#
-#     https://github.com/ReproNim/neurodocker
+FROM nipype/nipype:py38
 
-FROM neurodebian:stretch-non-free
+WORKDIR /work
 
-USER root
+COPY narps_open/ ./narps_open/
+COPY setup.py ./
 
-ARG DEBIAN_FRONTEND="noninteractive"
-
-ENV LANG="en_US.UTF-8" \
-    LC_ALL="en_US.UTF-8" \
-    ND_ENTRYPOINT="/neurodocker/startup.sh"
-RUN export ND_ENTRYPOINT="/neurodocker/startup.sh" \
-    && apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           apt-utils \
-           bzip2 \
-           ca-certificates \
-           curl \
-           locales \
-           unzip \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen \
-    && dpkg-reconfigure --frontend=noninteractive locales \
-    && update-locale LANG="en_US.UTF-8" \
-    && chmod 777 /opt && chmod a+s /opt \
-    && mkdir -p /neurodocker \
-    && if [ ! -f "$ND_ENTRYPOINT" ]; then \
-         echo '#!/usr/bin/env bash' >> "$ND_ENTRYPOINT" \
-         && echo 'set -e' >> "$ND_ENTRYPOINT" \
-         && echo 'export USER="${USER:=`whoami`}"' >> "$ND_ENTRYPOINT" \
-         && echo 'if [ -n "$1" ]; then "$@"; else /usr/bin/env bash; fi' >> "$ND_ENTRYPOINT"; \
-       fi \
-    && chmod -R 777 /neurodocker && chmod a+s /neurodocker
-
-ENTRYPOINT ["/neurodocker/startup.sh"]
-
-RUN apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           git \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/*
-
-ENV FSLDIR="/opt/fsl-6.0.3" \
-    PATH="/opt/fsl-6.0.3/bin:$PATH" \
-    FSLOUTPUTTYPE="NIFTI_GZ" \
-    FSLMULTIFILEQUIT="TRUE" \
-    FSLTCLSH="/opt/fsl-6.0.3/bin/fsltclsh" \
-    FSLWISH="/opt/fsl-6.0.3/bin/fslwish" \
-    FSLLOCKDIR="" \
-    FSLMACHINELIST="" \
-    FSLREMOTECALL="" \
-    FSLGECUDAQ="cuda.q"
-RUN apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           bc \
-           dc \
-           file \
-           libfontconfig1 \
-           libfreetype6 \
-           libgl1-mesa-dev \
-           libgl1-mesa-dri \
-           libglu1-mesa-dev \
-           libgomp1 \
-           libice6 \
-           libxcursor1 \
-           libxft2 \
-           libxinerama1 \
-           libxrandr2 \
-           libxrender1 \
-           libxt6 \
-           sudo \
-           wget \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && echo "Downloading FSL ..." \
-    && mkdir -p /opt/fsl-6.0.3 \
-    && curl -fsSL --retry 5 https://fsl.fmrib.ox.ac.uk/fsldownloads/fsl-6.0.3-centos6_64.tar.gz \
-    | tar -xz -C /opt/fsl-6.0.3 --strip-components 1 \
-    && sed -i '$iecho Some packages in this Docker container are non-free' $ND_ENTRYPOINT \
-    && sed -i '$iecho If you are considering commercial use of this container, please consult the relevant license:' $ND_ENTRYPOINT \
-    && sed -i '$iecho https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/Licence' $ND_ENTRYPOINT \
-    && sed -i '$isource $FSLDIR/etc/fslconf/fsl.sh' $ND_ENTRYPOINT \
-    && echo "Installing FSL conda environment ..." \
-    && bash /opt/fsl-6.0.3/etc/fslconf/fslpython_install.sh -f /opt/fsl-6.0.3
-
-ENV PATH="/opt/afni-latest:$PATH" \
-    AFNI_PLUGINPATH="/opt/afni-latest"
-RUN apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           ed \
-           gsl-bin \
-           libglib2.0-0 \
-           libglu1-mesa-dev \
-           libglw1-mesa \
-           libgomp1 \
-           libjpeg62 \
-           libnlopt-dev \
-           libxm4 \
-           netpbm \
-           python \
-           python3 \
-           r-base \
-           r-base-dev \
-           tcsh \
-           xfonts-base \
-           xvfb \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb \
-    && dpkg -i /tmp/toinstall.deb \
-    && rm /tmp/toinstall.deb \
-    && curl -sSL --retry 5 -o /tmp/toinstall.deb http://snapshot.debian.org/archive/debian-security/20160113T213056Z/pool/updates/main/libp/libpng/libpng12-0_1.2.49-1%2Bdeb7u2_amd64.deb \
-    && dpkg -i /tmp/toinstall.deb \
-    && rm /tmp/toinstall.deb \
-    && apt-get install -f \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && gsl2_path="$(find / -name 'libgsl.so.19' || printf '')" \
-    && if [ -n "$gsl2_path" ]; then \
-         ln -sfv "$gsl2_path" "$(dirname $gsl2_path)/libgsl.so.0"; \
-       fi \
-    && ldconfig \
-    && echo "Downloading AFNI ..." \
-    && mkdir -p /opt/afni-latest \
-    && curl -fsSL --retry 5 https://afni.nimh.nih.gov/pub/dist/tgz/linux_openmp_64.tgz \
-    | tar -xz -C /opt/afni-latest --strip-components 1 \
-    && PATH=$PATH:/opt/afni-latest rPkgsInstall -pkgs ALL
-
-ENV FORCE_SPMMCR="1" \
-    SPM_HTML_BROWSER="0" \
-    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/x86_64-linux-gnu:/opt/matlabmcr-2010a/v713/runtime/glnxa64:/opt/matlabmcr-2010a/v713/bin/glnxa64:/opt/matlabmcr-2010a/v713/sys/os/glnxa64:/opt/matlabmcr-2010a/v713/extern/bin/glnxa64" \
-    MATLABCMD="/opt/matlabmcr-2010a/v713/toolbox/matlab"
-RUN export TMPDIR="$(mktemp -d)" \
-    && apt-get update -qq \
-    && apt-get install -y -q --no-install-recommends \
-           bc \
-           libncurses5 \
-           libxext6 \
-           libxmu6 \
-           libxpm-dev \
-           libxt6 \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && echo "Downloading MATLAB Compiler Runtime ..." \
-    && curl -sSL --retry 5 -o /tmp/toinstall.deb http://mirrors.kernel.org/debian/pool/main/libx/libxp/libxp6_1.0.2-2_amd64.deb \
-    && dpkg -i /tmp/toinstall.deb \
-    && rm /tmp/toinstall.deb \
-    && apt-get install -f \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists/* \
-    && curl -fsSL --retry 5 -o "$TMPDIR/MCRInstaller.bin" https://dl.dropbox.com/s/zz6me0c3v4yq5fd/MCR_R2010a_glnxa64_installer.bin \
-    && chmod +x "$TMPDIR/MCRInstaller.bin" \
-    && "$TMPDIR/MCRInstaller.bin" -silent -P installLocation="/opt/matlabmcr-2010a" \
-    && rm -rf "$TMPDIR" \
-    && unset TMPDIR \
-    && echo "Downloading standalone SPM ..." \
-    && curl -fsSL --retry 5 -o /tmp/spm12.zip https://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/previous/spm12_r7771_R2010a.zip \
-    && unzip -q /tmp/spm12.zip -d /tmp \
-    && mkdir -p /opt/spm12-r7771 \
-    && mv /tmp/spm12/* /opt/spm12-r7771/ \
-    && chmod -R 777 /opt/spm12-r7771 \
-    && rm -rf /tmp/spm* \
-    && /opt/spm12-r7771/run_spm12.sh /opt/matlabmcr-2010a/v713 quit \
-    && sed -i '$iexport SPMMCRCMD=\"/opt/spm12-r7771/run_spm12.sh /opt/matlabmcr-2010a/v713 script\"' $ND_ENTRYPOINT
-
-RUN test "$(getent passwd neuro)" || useradd --no-user-group --create-home --shell /bin/bash neuro
-USER neuro
-
-WORKDIR /home
-
-ENV CONDA_DIR="/opt/miniconda-latest" \
-    PATH="/opt/miniconda-latest/bin:$PATH"
-RUN export PATH="/opt/miniconda-latest/bin:$PATH" \
-    && echo "Downloading Miniconda installer ..." \
-    && conda_installer="/tmp/miniconda.sh" \
-    && curl -fsSL --retry 5 -o "$conda_installer" https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh \
-    && bash "$conda_installer" -b -p /opt/miniconda-latest \
-    && rm -f "$conda_installer" \
-    && conda update -yq -nbase conda \
-    && conda config --system --prepend channels conda-forge \
-    && conda config --system --set auto_update_conda false \
-    && conda config --system --set show_channel_urls true \
-    && sync && conda clean -y --all && sync \
-    && conda create -y -q --name neuro \
-    && conda install -y -q --name neuro \
-           "python=3.8" \
-           "traits" \
-           "jupyter" \
-           "nilearn" \
-           "graphviz" \
-           "nipype" \
-           "scikit-image" \
-    && sync && conda clean -y --all && sync \
-    && bash -c "source activate neuro \
-    && pip install --no-cache-dir \
-           "matplotlib"" \
-    && rm -rf ~/.cache/pip/* \
-    && sync \
-    && sed -i '$isource activate neuro' $ND_ENTRYPOINT
-
-ENV LD_LIBRARY_PATH="/opt/miniconda-latest/envs/neuro:"
-
-RUN bash -c 'source activate neuro'
-
-USER root
-
-RUN chmod 777 -Rf /home
-
-RUN chown -R neuro /home
-
-USER neuro
-
-RUN mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py
-
-RUN echo '{ \
-    \n  "pkg_manager": "apt", \
-    \n  "instructions": [ \
-    \n    [ \
-    \n      "base", \
-    \n      "neurodebian:stretch-non-free" \
-    \n    ], \
-    \n    [ \
-    \n      "install", \
-    \n      [ \
-    \n        "git" \
-    \n      ] \
-    \n    ], \
-    \n    [ \
-    \n      "fsl", \
-    \n      { \
-    \n        "version": "6.0.3" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "afni", \
-    \n      { \
-    \n        "version": "latest", \
-    \n        "method": "binaries", \
-    \n        "install_r": "true", \
-    \n        "install_r_pkgs": "true", \
-    \n        "install_python2": "true", \
-    \n        "install_python3": "true" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "spm12", \
-    \n      { \
-    \n        "version": "r7771", \
-    \n        "method": "binaries" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "user", \
-    \n      "neuro" \
-    \n    ], \
-    \n    [ \
-    \n      "workdir", \
-    \n      "/home" \
-    \n    ], \
-    \n    [ \
-    \n      "miniconda", \
-    \n      { \
-    \n        "create_env": "neuro", \
-    \n        "conda_install": [ \
-    \n          "python=3.8", \
-    \n          "traits", \
-    \n          "jupyter", \
-    \n          "nilearn", \
-    \n          "graphviz", \
-    \n          "nipype", \
-    \n          "scikit-image" \
-    \n        ], \
-    \n        "pip_install": [ \
-    \n          "matplotlib" \
-    \n        ], \
-    \n        "activate": true \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "env", \
-    \n      { \
-    \n        "LD_LIBRARY_PATH": "/opt/miniconda-latest/envs/neuro:" \
-    \n      } \
-    \n    ], \
-    \n    [ \
-    \n      "run_bash", \
-    \n      "source activate neuro" \
-    \n    ], \
-    \n    [ \
-    \n      "user", \
-    \n      "root" \
-    \n    ], \
-    \n    [ \
-    \n      "run", \
-    \n      "chmod 777 -Rf /home" \
-    \n    ], \
-    \n    [ \
-    \n      "run", \
-    \n      "chown -R neuro /home" \
-    \n    ], \
-    \n    [ \
-    \n      "user", \
-    \n      "neuro" \
-    \n    ], \
-    \n    [ \
-    \n      "run", \
-    \n      "mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \\\"0.0.0.0\\\" > ~/.jupyter/jupyter_notebook_config.py" \
-    \n    ] \
-    \n  ] \
-    \n}' > /neurodocker/neurodocker_specs.json
+RUN /neurodocker/startup.sh pip install .
diff --git a/docs/data.md b/docs/data.md
index b619bf13..82821d8a 100644
--- a/docs/data.md
+++ b/docs/data.md
@@ -20,7 +20,7 @@ Tips for people using M1 MacBooks: `git-annex` is not yet available for M1 MacBo
 
 The `datalad install` command only downloaded the metadata associated with the dataset ; to download the actual files run the following command:
 
-> [! WARNING]
+> [!WARNING]
 > The following command lines will download **all** the data, which represents around :
 > * 3 GB for `data/results/`
 > * 880 GB for `data/original/`
diff --git a/docs/environment.md b/docs/environment.md
index 31c029b4..07b801e8 100644
--- a/docs/environment.md
+++ b/docs/environment.md
@@ -19,6 +19,25 @@ docker run -it \
   nipype/nipype:py38
 ```
 
+> [!NOTE]
+> The `Dockerfile` available in the root directory of the repository contains a recipe to build a Docker image with the project installed, ready to be launched.
+> From the root directory:
+>
+> ```bash
+> # Replace IMAGE_NAME in the following command
+> docker build -t IMAGE_NAME .
+>
+> # Check the image was created
+> docker images
+>
+> # Use IMAGE_NAME again to refer to the newly created image
+> docker run -it IMAGE_NAME
+> ```
+
+> [!TIP]
+> Find useful information on the [Docker documentation page](https://docs.docker.com/get-started/). Here is a [cheat sheet with Docker commands](https://docs.docker.com/get-started/docker_cheatsheet.pdf).
+
+
 ## Use Jupyter with the container
 
 If you wish to use [Jupyter](https://jupyter.org/) to run the code, a port forwarding is needed :
@@ -43,7 +62,7 @@ You can now access Jupyter using the address provided by the command line.
 
 ## Create a custom Docker image
 
-The `elodiegermani/open_pipeline` Docker image is based on [Neurodocker](https://github.com/ReproNim/neurodocker). It was created using the following command line :
+If you wish to create your own custom environment (for instance, to change parameters or software versions), you can build your custom image using [Neurodocker](https://github.com/ReproNim/neurodocker). Generate a Dockerfile using the following command line :
 
 ```bash
 docker run --rm repronim/neurodocker:0.7.0 generate docker \
@@ -67,7 +86,7 @@ docker run --rm repronim/neurodocker:0.7.0 generate docker \
   --run 'mkdir -p ~/.jupyter && echo c.NotebookApp.ip = \"0.0.0.0\" > ~/.jupyter/jupyter_notebook_config.py' > Dockerfile
 ```
 
-If you wish to create your own custom environment, make changes to the parameters, and build your custom image from the generated Dockerfile.
+Then build a new image from the generated Dockerfile.
 
 ```bash
 # Replace IMAGE_NAME in the following command
diff --git a/environment.yml b/environment.yml
deleted file mode 100644
index e2fa4b95..00000000
--- a/environment.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-name: narps_open_pipelines
-channels:
-  - conda-forge
-  - defaults
-dependencies:
-  - python=3.8.0
-  - pip
-  - traits
-  - jupyter
-  - nilearn
-  - graphviz
-  - nipype
-  - scikit-image
-  - pip:
-    - matplotlib
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 7da1f960..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[flake8]
-max-line-length = 100
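
For a quick sanity check of the simplified image introduced by this diff, one can build it and confirm that the `narps_open` package was installed; a minimal sketch only, where the `narps-open` tag and the import test are illustrative and not part of the change:

```bash
# Build the image from the repository root (the tag name is arbitrary)
docker build -t narps-open .

# Run a throwaway container and check that the package imports
# (the nipype/nipype:py38 entrypoint activates its conda environment first)
docker run --rm narps-open python -c "import narps_open; print(narps_open.__file__)"
```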