From 8f50617e51e7eec40c8ff9f0d0ec625224055fff Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Boris=20Cl=C3=A9net?= <117362283+bclenet@users.noreply.github.com>
Date: Mon, 19 Feb 2024 11:37:03 +0100
Subject: [PATCH] Documentation changes after Empenn Hackathon (#176)

* [BUG] inside unit_tests workflow
* [DOC] fix some broken links
* [DOC] adding template for pipeline testing
* [DOC] adding template for pipeline testing
* About implemented_pipelines
* Deal with test template
* [DOC] new readme for the doc
* Changes in README.md
* [DOC] slight changes to docs/README.md
* Add links to past events
* Changes in readme.md
* fMRI trail
* Adding trail description in contribution guide
* Separate trails in contribution guide
* [TEST] Solving pytest issues with template test
* Changing docker image in use
* FSL template correction
* [DOC] writing test files
* Codespell
* First step in writing documentation about NARPS
* [DOC] completing doc about narps
* [DOC] completing doc about narps
* [DOC] completing doc about narps
* [DATALAD] change results url
* [DOC] reference to the github project for reproduction mgmt
* [DOC] adding team id choices for narps open runner
* [DOC] list of available team ids in command tools documentation
* [DOC] configuration info inside INSTALL.md
* [DOC] configuration info inside INSTALL.md
* NARPS Exclusion comments
* Empenn hackathon names
* Data documentation (datalad get recursive
* Adjusting correlation thresholds inside testing configuration, after U26C results
* Update dataset size
* Freeze versions
---
 INSTALL.md                                  |  7 +++--
 README.md                                   |  3 +-
 docs/data.md                                |  7 ++++-
 docs/environment.md                         | 10 +++----
 narps_open/pipelines/__main__.py            | 30 +++++++++++++++++++
 .../utils/configuration/testing_config.toml |  2 +-
 setup.py                                    | 18 +++++------
 7 files changed, 57 insertions(+), 20 deletions(-)
 create mode 100644 narps_open/pipelines/__main__.py

diff --git a/INSTALL.md b/INSTALL.md
index f10a4922..18de2747 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -43,7 +43,7 @@ datalad get data/original/ds001734/derivatives/fmriprep/sub-00[1-4] -J 12
 [Install Docker](https://docs.docker.com/engine/install/) then pull the nipype Docker image :
 
 ```bash
-docker pull nipype/nipype
+docker pull nipype/nipype:py38
 ```
 
 Once it's done you can check the image is available on your system :
@@ -51,7 +51,7 @@
 ```bash
 docker images
    REPOSITORY                TAG      IMAGE ID       CREATED        SIZE
-   docker.io/nipype/nipype   latest   0f3c74d28406   9 months ago   22.7 GB
+   docker.io/nipype/nipype   py38     0f3c74d28406   9 months ago   22.7 GB
 ```
 
 > [!NOTE]
@@ -63,10 +63,11 @@ Start a Docker container from the Docker image :
 
 ```bash
 # Replace PATH_TO_THE_REPOSITORY in the following command (e.g.: with /home/user/dev/narps_open_pipelines/)
-docker run -it -v PATH_TO_THE_REPOSITORY:/home/neuro/code/ nipype/nipype
+docker run -it -v PATH_TO_THE_REPOSITORY:/home/neuro/code/ nipype/nipype:py38
 ```
 
 Optionally edit the configuration file `narps_open/utils/configuration/default_config.toml` so that the referred paths match the ones inside the container. E.g.: if using the previous command line, the `directories` part of the configuration file should be :
+
 ```toml
 # default_config.toml
 # ...
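As a quick check that the volume mount from the `docker run` command above worked, one might list the mounted repository inside the container and install the package from it. This is a minimal sketch of an assumed workflow, not a step stated in INSTALL.md; the editable `pip install -e .` step in particular is an assumption:

```bash
# Assumed follow-up, run inside the container started above:
# check that the repository is visible at the mount point used in the docker run command
ls /home/neuro/code/

# assumed editable install of the package from the mounted repository
cd /home/neuro/code/
pip install -e .
```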
diff --git a/README.md b/README.md
index a1eee7a6..3eae7353 100644
--- a/README.md
+++ b/README.md
@@ -53,7 +53,8 @@ This project is supported by Région Bretagne (Boost MIND) and by Inria (Explora
 This project is developed in the Empenn team by Boris Clénet, Elodie Germani, Jeremy Lefort-Besnard and Camille Maumet with contributions by Rémi Gau.
 
 In addition, this project was presented and received contributions during the following events:
- - [Brainhack Marseille 2023](https://brainhack-marseille.github.io/) (December 2023):
+ - [Empenn team](https://team.inria.fr/empenn/) hackathon (February 2024): Mathieu Acher, Élise Bannier, Boris Clénet, Isabelle Corouge, Malo Gaubert, Élodie Germani, Gauthier Le Bartz Lyan, Jérémy Lefort-Besnard, Camille Maumet, Youenn Merel, Alexandre Pron.
+ - [Brainhack Marseille 2023](https://brainhack-marseille.github.io/) (December 2023)
 - [ORIGAMI lab](https://neurodatascience.github.io/) hackathon (September 2023):
 - [OHBM Brainhack 2023](https://ohbm.github.io/hackathon2023/) (July 2023): Arshitha Basavaraj, Boris Clénet, Rémi Gau, Élodie Germani, Yaroslav Halchenko, Camille Maumet, Paul Taylor.
 - [e-ReproNim FENS NENS Cluster Brainhack](https://repro.school/2023-e-repronim-brainhack/) (June 2023) : Liz Bushby, Boris Clénet, Michael Dayan, Aimee Westbrook.
diff --git a/docs/data.md b/docs/data.md
index 3a68b32e..b619bf13 100644
--- a/docs/data.md
+++ b/docs/data.md
@@ -20,10 +20,15 @@ Tips for people using M1 MacBooks: `git-annex` is not yet available for M1 MacBo
 
 The `datalad install` command only downloaded the metadata associated with the dataset ; to download the actual files run the following command:
 
+> [!WARNING]
+> The following command will download **all** the data, which represents around:
+> * 3 GB for `data/results/`
+> * 880 GB for `data/original/`
+
 ```bash
 # To get all the data
 cd data/
-datalad get ./*
+datalad get --recursive ./*
 ```
 
 If you only want parts of the data, replace the `./*` by the paths to the desired files.
diff --git a/docs/environment.md b/docs/environment.md
index 00442421..a345e94f 100644
--- a/docs/environment.md
+++ b/docs/environment.md
@@ -2,12 +2,12 @@
 
 ## The Docker container :whale:
 
-The NARPS Open Pipelines project is build upon several dependencies, such as [Nipype](https://nipype.readthedocs.io/en/latest/) but also the original software packages used by the pipelines (SPM, FSL, AFNI...). Therefore we recommend to use the [`nipype/nipype` Docker image](https://hub.docker.com/r/nipype/nipype/) that contains all the required software dependencies.
+The NARPS Open Pipelines project is built upon several dependencies, such as [Nipype](https://nipype.readthedocs.io/en/latest/), but also the original software packages used by the pipelines (SPM, FSL, AFNI...). Therefore we recommend using the [`nipype/nipype:py38` Docker image](https://hub.docker.com/r/nipype/nipype/) that contains all the required software dependencies.
 
 The simplest way to start the container is by using the command below :
 
 ```bash
-docker run -it nipype/nipype
+docker run -it nipype/nipype:py38
 ```
 
 From this command line, you need to add volumes to be able to link with your local files (code repository).
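To make the last sentence of the `docs/data.md` change above concrete, here is a sketch of a partial download following the pattern already used in INSTALL.md; the subject selection `sub-00[1-4]` and the `-J 12` parallelism are only an example:

```bash
# Illustrative partial download, run from the repository root:
# fetch only the fMRIprep derivatives of subjects 001 to 004 instead of the full 880 GB
datalad get data/original/ds001734/derivatives/fmriprep/sub-00[1-4] -J 12
```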
@@ -16,7 +16,7 @@ From this command line, you need to add volumes to be able to link with your loc
 # Replace PATH_TO_THE_REPOSITORY in the following command (e.g.: with /home/user/dev/narps_open_pipelines/)
 docker run -it \
     -v PATH_TO_THE_REPOSITORY:/home/neuro/code/ \
-    nipype/nipype
+    nipype/nipype:py38
 ```
 
 ## Use Jupyter with the container
@@ -27,7 +27,7 @@ If you wish to use [Jupyter](https://jupyter.org/) to run the code, a port forwa
 docker run -it \
     -v PATH_TO_THE_REPOSITORY:/home/neuro/code/ \
     -p 8888:8888 \
-    nipype/nipype
+    nipype/nipype:py38
 ```
 
 Then, from inside the container :
@@ -81,7 +81,7 @@ To use SPM inside the container, use this command at the beginning of your scrip
 
 ```python
 from nipype.interfaces import spm
 
-matlab_cmd = '/opt/spm12-r7771/run_spm12.sh /opt/matlabmcr-2010a/v713/ script'
+matlab_cmd = '/opt/spm12-r7219/run_spm12.sh /opt/matlabmcr-2010a/v713/ script'
 spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
 ```
diff --git a/narps_open/pipelines/__main__.py b/narps_open/pipelines/__main__.py
new file mode 100644
index 00000000..60fd5c76
--- /dev/null
+++ b/narps_open/pipelines/__main__.py
@@ -0,0 +1,30 @@
+#!/usr/bin/python
+# coding: utf-8
+
+""" Provide a command-line interface for the package narps_open.pipelines """
+
+from argparse import ArgumentParser
+
+from narps_open.pipelines import get_implemented_pipelines
+
+def main():
+    """ Entry-point for the command line tool narps_open_pipeline """
+
+    # Parse arguments
+    parser = ArgumentParser(description='Get description of a NARPS pipeline.')
+    parser.add_argument('-v', '--verbose', action='store_true',
+        help='verbose mode')
+    arguments = parser.parse_args()
+
+    # Print header
+    print('NARPS Open Pipelines')
+
+    # Print general information about NARPS Open Pipelines
+    print('A codebase reproducing the 70 pipelines of the NARPS study (Botvinik-Nezer et al., 2020) shared as an open resource for the community.')
+
+    # Print pipelines
+    implemented_pipelines = get_implemented_pipelines()
+    print(f'There are currently {len(implemented_pipelines)} implemented pipelines: {implemented_pipelines}')
+
+if __name__ == '__main__':
+    main()
diff --git a/narps_open/utils/configuration/testing_config.toml b/narps_open/utils/configuration/testing_config.toml
index b5374183..86ba77b8 100644
--- a/narps_open/utils/configuration/testing_config.toml
+++ b/narps_open/utils/configuration/testing_config.toml
@@ -23,4 +23,4 @@ neurovault_naming = true # true if results files are saved using the neurovault
 
 [testing.pipelines]
 nb_subjects_per_group = 4 # Compute first level analyses by subgroups of N subjects, to avoid lacking of disk and memory
-correlation_thresholds = [0.30, 0.70, 0.79, 0.85, 0.93] # Correlation between reproduced hypotheses files and results, respectively for [20, 40, 60, 80, 108] subjects.
+correlation_thresholds = [0.30, 0.70, 0.78, 0.85, 0.93] # Correlation between reproduced hypotheses files and results, respectively for [20, 40, 60, 80, 108] subjects.
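Because the new file added above is named `__main__.py`, the `narps_open.pipelines` package becomes directly runnable with `python -m`. A minimal usage sketch, assuming the package is installed in the current environment; only the two fixed sentences in the commented output come from the print statements above, the pipeline count and list are placeholders:

```bash
# Run the new command-line tool of the narps_open.pipelines package
python -m narps_open.pipelines

# Expected output (the number and list of implemented pipelines are illustrative):
#   NARPS Open Pipelines
#   A codebase reproducing the 70 pipelines of the NARPS study (Botvinik-Nezer et al., 2020) shared as an open resource for the community.
#   There are currently N implemented pipelines: [...]
```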
diff --git a/setup.py b/setup.py
index 185c8418..ec22904f 100644
--- a/setup.py
+++ b/setup.py
@@ -18,18 +18,18 @@
     'tomli>=2.0.1,<2.1',
     'networkx>=2.0,<3.0', # a workaround to nipype's bug (issue 3530)
     'nilearn>=0.10.0,<0.11',
-    'nipype',
-    'pandas'
+    'nipype>=1.8.6,<1.9',
+    'pandas>=1.5.2,<1.6'
     ]
 
 extras_require = {
     'tests': [
-        'pathvalidate',
-        'pylint',
-        'pytest',
-        'pytest-cov',
-        'pytest-helpers-namespace',
-        'pytest-mock',
-        'checksumdir'
+        'pathvalidate>=3.2.0,<3.3',
+        'pylint>=3.0.3,<3.1',
+        'pytest>=7.2.0,<7.3',
+        'pytest-cov>=2.10.1,<2.11',
+        'pytest-helpers-namespace>=2021.12.29,<2021.13',
+        'pytest-mock>=3.12.0,<3.13',
+        'checksumdir>=1.2.0,<1.3'
     ]
 }
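The test dependencies pinned above are grouped under the `tests` key of `extras_require`, so they install as a pip extra. A short sketch of the assumed installation command; the editable `-e` form and running from the repository root are assumptions, not stated in this patch:

```bash
# Install narps_open together with the pinned test dependencies
# declared in extras_require['tests'] of setup.py
pip install -e ".[tests]"

# The test tooling is then available at the pinned versions
pytest --version
pylint --version
```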