diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 76842576a..13691cfa3 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,3 +1,7 @@ +# 2022-09-22 - goncalves.mathias@gmail.com - STY: black/isort the docker wrapper +9976458388f369cba4b7d81359acc40b52f6621c +# 2022-09-22 - goncalves.mathias@gmail.com - STY: Apply black/isort to codebase +7eedab8b29497cee1d8dd540c2266e740f484a86 # 2021-11-05 - markiewicz@stanford.edu - STY: Update black config d2ad20301306f283d504ec7b5a1bd73ce58c2b11 # 2021-09-22 - code@oscaresteban.es - sty: run black diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml index 5e799822b..092e1d524 100644 --- a/.github/workflows/contrib.yml +++ b/.github/workflows/contrib.yml @@ -23,19 +23,20 @@ jobs: python-version: [3.9] steps: - - uses: actions/checkout@v2 - with: - submodules: recursive - fetch-depth: 0 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Display Python version run: python -c "import sys; print(sys.version)" - - name: Install flake8 - run: python -m pip install flake8 + - name: Install black/isort + run: python -m pip install black isort - name: Check fMRIPrep - run: python -m flake8 fmriprep + run: | + python -m black --check fmriprep + python -m isort --check fmriprep - name: Check wrapper - run: python -m flake8 wrapper + run: | + python -m black --check wrapper + python -m isort --check wrapper diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..090d8068b --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v2.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + files: ^fmriprep/ +- repo: https://github.com/pycqa/isort + rev: 5.10.1 + hooks: + - id: isort + files: ^fmriprep/ diff --git a/fmriprep/__about__.py b/fmriprep/__about__.py index 55ff408d0..3c0dbdb92 100644 --- a/fmriprep/__about__.py +++ b/fmriprep/__about__.py @@ -22,15 +22,18 @@ # """Base module variables.""" from ._version import get_versions + __version__ = get_versions()['version'] del get_versions __packagename__ = 'fmriprep' __copyright__ = 'Copyright 2022, The NiPreps Developers' -__credits__ = ('Contributors: please check the ``.zenodo.json`` file at the top-level folder' - 'of the repository') +__credits__ = ( + 'Contributors: please check the ``.zenodo.json`` file at the top-level folder' + 'of the repository' +) __url__ = 'https://github.com/nipreps/fmriprep' -DOWNLOAD_URL = ( - 'https://github.com/nipreps/{name}/archive/{ver}.tar.gz'.format( - name=__packagename__, ver=__version__)) +DOWNLOAD_URL = 'https://github.com/nipreps/{name}/archive/{ver}.tar.gz'.format( + name=__packagename__, ver=__version__ +) diff --git a/fmriprep/__init__.py b/fmriprep/__init__.py index dd0901e90..f1dc0e1c3 100644 --- a/fmriprep/__init__.py +++ b/fmriprep/__init__.py @@ -2,12 +2,7 @@ # vi: set ft=python sts=4 ts=4 sw=4 et: """Top-module metadata.""" -from .__about__ import ( - __copyright__, - __credits__, - __packagename__, - __version__, -) +from .__about__ import __copyright__, __credits__, __packagename__, __version__ __all__ = [ '__copyright__', @@ -19,8 +14,9 @@ # Silence PyBIDS warning for extension entity behavior # Can 
be removed once minimum PyBIDS dependency hits 0.14 try: - from packaging.version import Version import bids + from packaging.version import Version + if Version(bids.__version__) < Version('0.14'): bids.config.set_option('extension_initial_dot', True) except (ImportError, ValueError): diff --git a/fmriprep/__main__.py b/fmriprep/__main__.py index b45236455..216e6d230 100644 --- a/fmriprep/__main__.py +++ b/fmriprep/__main__.py @@ -24,7 +24,9 @@ if __name__ == '__main__': import sys + from . import __name__ as module + # `python -m ` typically displays the command as __main__.py if '__main__.py' in sys.argv[0]: sys.argv[0] = '%s -m %s' % (sys.executable, module) diff --git a/fmriprep/_warnings.py b/fmriprep/_warnings.py index aa39d8f61..0d638de79 100644 --- a/fmriprep/_warnings.py +++ b/fmriprep/_warnings.py @@ -21,8 +21,8 @@ # https://www.nipreps.org/community/licensing/ # """Manipulate Python warnings.""" -import warnings import logging +import warnings _wlog = logging.getLogger("py.warnings") _wlog.addHandler(logging.NullHandler()) @@ -34,9 +34,7 @@ def _warn(message, category=None, stacklevel=1, source=None): category = type(category).__name__ category = category.replace("type", "WARNING") - logging.getLogger("py.warnings").warning( - f"{category or 'WARNING'}: {message}" - ) + logging.getLogger("py.warnings").warning(f"{category or 'WARNING'}: {message}") def _showwarning(message, category, filename, lineno, file=None, line=None): diff --git a/fmriprep/cli/parser.py b/fmriprep/cli/parser.py index f4afaa843..7f9ce9f69 100644 --- a/fmriprep/cli/parser.py +++ b/fmriprep/cli/parser.py @@ -22,6 +22,7 @@ # """Parser.""" import sys + from .. import config @@ -30,15 +31,14 @@ def _build_parser(**kwargs): ``kwargs`` are passed to ``argparse.ArgumentParser`` (mainly useful for debugging). """ + from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser from functools import partial from pathlib import Path - from argparse import ( - ArgumentParser, - ArgumentDefaultsHelpFormatter, - ) + + from niworkflows.utils.spaces import OutputReferencesAction, Reference from packaging.version import Version + from .version import check_latest, is_flagged - from niworkflows.utils.spaces import Reference, OutputReferencesAction def _path_exists(path, parser): """Ensure a given path exists.""" @@ -61,9 +61,9 @@ def _min_one(value, parser): return value def _to_gb(value): - scale = {"G": 1, "T": 10 ** 3, "M": 1e-3, "K": 1e-6, "B": 1e-9} + scale = {"G": 1, "T": 10**3, "M": 1e-3, "K": 1e-6, "B": 1e-9} digits = "".join([c for c in value if c.isdigit()]) - units = value[len(digits):] or "M" + units = value[len(digits) :] or "M" return int(digits) * scale[units[0]] def _drop_sub(value): @@ -71,15 +71,14 @@ def _drop_sub(value): def _filter_pybids_none_any(dct): import bids + return { - k: bids.layout.Query.NONE - if v is None - else (bids.layout.Query.ANY if v == "*" else v) + k: bids.layout.Query.NONE if v is None else (bids.layout.Query.ANY if v == "*" else v) for k, v in dct.items() } def _bids_filter(value, parser): - from json import loads, JSONDecodeError + from json import JSONDecodeError, loads if value: if Path(value).exists(): @@ -98,17 +97,16 @@ def _slice_time_ref(value, parser): try: value = float(value) except ValueError: - raise parser.error("Slice time reference must be number, 'start', or 'middle'. " - f"Received {value}.") + raise parser.error( + "Slice time reference must be number, 'start', or 'middle'. " f"Received {value}." 
+ ) if not 0 <= value <= 1: raise parser.error(f"Slice time reference must be in range 0-1. Received {value}.") return value verstr = f"fMRIPrep v{config.environment.version}" currentv = Version(config.environment.version) - is_release = not any( - (currentv.is_devrelease, currentv.is_prerelease, currentv.is_postrelease) - ) + is_release = not any((currentv.is_devrelease, currentv.is_prerelease, currentv.is_postrelease)) parser = ArgumentParser( description="fMRIPrep: fMRI PREProcessing workflows v{}".format( @@ -206,7 +204,7 @@ def _slice_time_ref(value, parser): metavar="PATH", type=Path, help="Path to a PyBIDS database folder, for faster indexing (especially " - "useful for large datasets). Will be created if not present." + "useful for large datasets). Will be created if not present.", ) g_perfm = parser.add_argument_group("Options to handle performance") @@ -239,8 +237,7 @@ def _slice_time_ref(value, parser): g_perfm.add_argument( "--low-mem", action="store_true", - help="attempt to reduce memory usage (will increase disk usage " - "in working directory)", + help="attempt to reduce memory usage (will increase disk usage " "in working directory)", ) g_perfm.add_argument( "--use-plugin", @@ -250,9 +247,7 @@ def _slice_time_ref(value, parser): type=IsFile, help="nipype plugin configuration file", ) - g_perfm.add_argument( - "--anat-only", action="store_true", help="run anatomical workflows only" - ) + g_perfm.add_argument("--anat-only", action="store_true", help="run anatomical workflows only") g_perfm.add_argument( "--boilerplate_only", action="store_true", @@ -321,7 +316,7 @@ def _slice_time_ref(value, parser): default=False, help="""\ Output individual echo time series with slice, motion and susceptibility correction. \ -Useful for further Tedana processing post-fMRIPrep.""" +Useful for further Tedana processing post-fMRIPrep.""", ) g_conf.add_argument( @@ -370,9 +365,9 @@ def _slice_time_ref(value, parser): default=None, type=SliceTimeRef, help="The time of the reference slice to correct BOLD values to, as a fraction " - "acquisition time. 0 indicates the start, 0.5 the midpoint, and 1 the end " - "of acquisition. The alias `start` corresponds to 0, and `middle` to 0.5. " - "The default value is 0.5.", + "acquisition time. 0 indicates the start, 0.5 the midpoint, and 1 the end " + "of acquisition. The alias `start` corresponds to 0, and `middle` to 0.5. " + "The default value is 0.5.", ) g_conf.add_argument( "--dummy-scans", @@ -438,8 +433,7 @@ def _slice_time_ref(value, parser): action="store", default=1.5, type=float, - help="Threshold for flagging a frame as an outlier on the basis of standardised " - "DVARS", + help="Threshold for flagging a frame as an outlier on the basis of standardised " "DVARS", ) # ANTs options @@ -499,7 +493,7 @@ def _slice_time_ref(value, parser): const="error", default=False, help="EXPERIMENTAL: Use fieldmap-free distortion correction; " - "if unable, error (default) or warn based on optional argument.", + "if unable, error (default) or warn based on optional argument.", ) g_syn.add_argument( "--force-syn", @@ -562,7 +556,7 @@ def _slice_time_ref(value, parser): help="Organization of outputs. bids (default) places fMRIPrep derivatives " "directly in the output directory, and defaults to placing FreeSurfer " "derivatives in /sourcedata/freesurfer. legacy creates " - "derivative datasets as subdirectories of outputs." 
+ "derivative datasets as subdirectories of outputs.", ) g_other.add_argument( "-w", @@ -597,7 +591,8 @@ def _slice_time_ref(value, parser): action="store", metavar="FILE", help="Use pre-generated configuration file. Values in file will be overridden " - "by command-line arguments.") + "by command-line arguments.", + ) g_other.add_argument( "--write-graph", action="store_true", @@ -608,8 +603,7 @@ def _slice_time_ref(value, parser): "--stop-on-first-crash", action="store_true", default=False, - help="Force stopping on first crash, even if a work directory" - " was specified.", + help="Force stopping on first crash, even if a work directory" " was specified.", ) g_other.add_argument( "--notrack", @@ -665,6 +659,7 @@ def _slice_time_ref(value, parser): def parse_args(args=None, namespace=None): """Parse args and run further checks on the command line.""" import logging + from niworkflows.utils.spaces import Reference, SpatialReferences parser = _build_parser() @@ -680,6 +675,7 @@ def parse_args(args=None, namespace=None): if not config.execution.notrack: import pkgutil + if pkgutil.find_loader("sentry_sdk") is None: config.execution.notrack = True config.loggers.cli.warning("Telemetry disabled because sentry_sdk is not installed.") @@ -759,9 +755,7 @@ def parse_args(args=None, namespace=None): build_log.info(f"Clearing previous fMRIPrep working directory: {work_dir}") if not clean_directory(work_dir): - build_log.warning( - f"Could not clear all contents of working directory: {work_dir}" - ) + build_log.warning(f"Could not clear all contents of working directory: {work_dir}") # Update the config with an empty dict to trigger initialization of all config # sections (we used `init=False` above). @@ -793,9 +787,7 @@ def parse_args(args=None, namespace=None): "Making sure the input data is BIDS compliant (warnings can be ignored in most " "cases)." 
) - validate_input_dir( - config.environment.exec_env, opts.bids_dir, opts.participant_label - ) + validate_input_dir(config.environment.exec_env, opts.bids_dir, opts.participant_label) # Setup directories config.execution.log_dir = config.execution.fmriprep_dir / "logs" diff --git a/fmriprep/cli/run.py b/fmriprep/cli/run.py index 58b609c3b..48bbddf8a 100755 --- a/fmriprep/cli/run.py +++ b/fmriprep/cli/run.py @@ -27,19 +27,21 @@ def main(): """Entry point.""" + import gc + import sys + from multiprocessing import Manager, Process from os import EX_SOFTWARE from pathlib import Path - import sys - import gc - from multiprocessing import Process, Manager + + from ..utils.bids import write_bidsignore, write_derivative_description from .parser import parse_args - from ..utils.bids import write_derivative_description, write_bidsignore parse_args() sentry_sdk = None if not config.execution.notrack: import sentry_sdk + from ..utils.sentry import sentry_setup sentry_setup() @@ -104,9 +106,7 @@ def main(): config.loggers.workflow.log( 15, - "\n".join( - ["fMRIPrep config:"] + ["\t\t%s" % s for s in config.dumps().splitlines()] - ), + "\n".join(["fMRIPrep config:"] + ["\t\t%s" % s for s in config.dumps().splitlines()]), ) config.loggers.workflow.log(25, "fMRIPrep started!") errno = 1 # Default is error exit unless otherwise set @@ -156,21 +156,18 @@ def main(): ) if config.workflow.run_reconall: - from templateflow import api from niworkflows.utils.misc import _copy_any + from templateflow import api dseg_tsv = str(api.get("fsaverage", suffix="dseg", extension=[".tsv"])) - _copy_any( - dseg_tsv, str(config.execution.fmriprep_dir / "desc-aseg_dseg.tsv") - ) - _copy_any( - dseg_tsv, str(config.execution.fmriprep_dir / "desc-aparcaseg_dseg.tsv") - ) + _copy_any(dseg_tsv, str(config.execution.fmriprep_dir / "desc-aseg_dseg.tsv")) + _copy_any(dseg_tsv, str(config.execution.fmriprep_dir / "desc-aparcaseg_dseg.tsv")) errno = 0 finally: - from fmriprep.reports.core import generate_reports from pkg_resources import resource_filename as pkgrf + from fmriprep.reports.core import generate_reports + # Generate reports phase failed_reports = generate_reports( config.execution.participant_label, @@ -179,9 +176,7 @@ def main(): config=pkgrf("fmriprep", "data/reports-spec.yml"), packagename="fmriprep", ) - write_derivative_description( - config.execution.bids_dir, config.execution.fmriprep_dir - ) + write_derivative_description(config.execution.bids_dir, config.execution.fmriprep_dir) write_bidsignore(config.execution.fmriprep_dir) if failed_reports and not config.execution.notrack: diff --git a/fmriprep/cli/tests/test_parser.py b/fmriprep/cli/tests/test_parser.py index 77b0e90d6..044d02cf0 100644 --- a/fmriprep/cli/tests/test_parser.py +++ b/fmriprep/cli/tests/test_parser.py @@ -21,15 +21,17 @@ # https://www.nipreps.org/community/licensing/ # """Test parser.""" -from packaging.version import Version -from pkg_resources import resource_filename as pkgrf from argparse import ArgumentError from contextlib import nullcontext + import pytest -from ..parser import _build_parser, parse_args -from .. import version as _version +from packaging.version import Version +from pkg_resources import resource_filename as pkgrf + from ... import config from ...tests.test_config import _reset_config +from .. 
import version as _version +from ..parser import _build_parser, parse_args MIN_ARGS = ["data/", "out/", "participant"] @@ -123,9 +125,7 @@ def _mock_check_latest(*args, **kwargs): assert (msg in captured) is expectation -@pytest.mark.parametrize( - "flagged", [(True, None), (True, "random reason"), (False, None)] -) +@pytest.mark.parametrize("flagged", [(True, None), (True, "random reason"), (False, None)]) def test_get_parser_blacklist(monkeypatch, capsys, flagged): """Make sure the blacklisting banner is shown.""" @@ -149,9 +149,16 @@ def test_parse_args(tmp_path): out_dir = tmp_path / "out" work_dir = tmp_path / "work" - parse_args(args=[bids_dir, str(out_dir), "participant", # BIDS App - "-w", str(work_dir), # Don't pollute CWD - "--skip-bids-validation"]) # Empty files make BIDS sad + parse_args( + args=[ + bids_dir, + str(out_dir), + "participant", # BIDS App + "-w", + str(work_dir), # Don't pollute CWD + "--skip-bids-validation", # Empty files make BIDS sad + ] + ) assert config.execution.layout.root == bids_dir _reset_config() @@ -160,8 +167,7 @@ def test_bids_filter_file(tmp_path, capsys): bids_path = tmp_path / "data" out_path = tmp_path / "out" bff = tmp_path / "filter.json" - args = [str(bids_path), str(out_path), "participant", - "--bids-filter-file", str(bff)] + args = [str(bids_path), str(out_path), "participant", "--bids-filter-file", str(bff)] bids_path.mkdir() parser = _build_parser() @@ -197,13 +203,16 @@ def test_slice_time_ref(tmp_path, st_ref): _reset_config() -@pytest.mark.parametrize("args, expectation", ( - ([], False), - (["--use-syn-sdc"], "error"), - (["--use-syn-sdc", "error"], "error"), - (["--use-syn-sdc", "warn"], "warn"), - (["--use-syn-sdc", "other"], (SystemExit, ArgumentError)), -)) +@pytest.mark.parametrize( + "args, expectation", + ( + ([], False), + (["--use-syn-sdc"], "error"), + (["--use-syn-sdc", "error"], "error"), + (["--use-syn-sdc", "warn"], "warn"), + (["--use-syn-sdc", "other"], (SystemExit, ArgumentError)), + ), +) def test_use_syn_sdc(tmp_path, args, expectation): bids_path = tmp_path / "data" out_path = tmp_path / "out" diff --git a/fmriprep/cli/tests/test_version.py b/fmriprep/cli/tests/test_version.py index fa210879a..263de0fc0 100644 --- a/fmriprep/cli/tests/test_version.py +++ b/fmriprep/cli/tests/test_version.py @@ -21,22 +21,22 @@ # https://www.nipreps.org/community/licensing/ # """Test version checks.""" -from os import getenv from datetime import datetime +from os import getenv from pathlib import Path -from packaging.version import Version + import pytest +from packaging.version import Version + from .. 
import version as _version -from ..version import check_latest, DATE_FMT, requests, is_flagged +from ..version import DATE_FMT, check_latest, is_flagged, requests class MockResponse: """Mocks the requests module so that Pypi is not actually queried.""" status_code = 200 - _json = { - "releases": {"1.0.0": None, "1.0.1": None, "1.1.0": None, "1.1.1rc1": None} - } + _json = {"releases": {"1.0.0": None, "1.0.1": None, "1.1.0": None, "1.1.1rc1": None}} def __init__(self, code=200, json=None): """Allow setting different response codes.""" @@ -200,9 +200,7 @@ def mock_get(*args, **kwargs): def test_readonly(tmp_path, monkeypatch): """Test behavior when $HOME/.cache/fmriprep/latest can't be written out.""" - home_path = ( - Path("/home/readonly") if getenv("TEST_READONLY_FILESYSTEM") else tmp_path - ) + home_path = Path("/home/readonly") if getenv("TEST_READONLY_FILESYSTEM") else tmp_path monkeypatch.setenv("HOME", str(home_path)) cachedir = home_path / ".cache" diff --git a/fmriprep/cli/version.py b/fmriprep/cli/version.py index ac267ffee..6bf909da5 100644 --- a/fmriprep/cli/version.py +++ b/fmriprep/cli/version.py @@ -22,9 +22,11 @@ # """Version CLI helpers.""" -from pathlib import Path from datetime import datetime +from pathlib import Path + import requests + from .. import __version__ RELEASE_EXPIRY_DAYS = 14 @@ -33,7 +35,7 @@ def check_latest(): """Determine whether this is the latest version.""" - from packaging.version import Version, InvalidVersion + from packaging.version import InvalidVersion, Version latest = None date = None @@ -61,9 +63,7 @@ def check_latest(): if latest is None or outdated is True: try: - response = requests.get( - url="https://pypi.org/pypi/fmriprep/json", timeout=1.0 - ) + response = requests.get(url="https://pypi.org/pypi/fmriprep/json", timeout=1.0) except Exception: response = None @@ -77,9 +77,7 @@ def check_latest(): if cachefile is not None and latest is not None: try: - cachefile.write_text( - "|".join(("%s" % latest, datetime.now().strftime(DATE_FMT))) - ) + cachefile.write_text("|".join(("%s" % latest, datetime.now().strftime(DATE_FMT)))) except Exception: pass diff --git a/fmriprep/cli/workflow.py b/fmriprep/cli/workflow.py index 37b7f531f..eef124fdb 100644 --- a/fmriprep/cli/workflow.py +++ b/fmriprep/cli/workflow.py @@ -35,10 +35,12 @@ def build_workflow(config_file, retval): """Create the Nipype Workflow that supports the whole execution graph.""" from pathlib import Path - from pkg_resources import resource_filename as pkgrf - from niworkflows.utils.bids import collect_participants, check_pipeline_version - from niworkflows.utils.misc import check_valid_fs_license + from niworkflows.reports import generate_reports + from niworkflows.utils.bids import check_pipeline_version, collect_participants + from niworkflows.utils.misc import check_valid_fs_license + from pkg_resources import resource_filename as pkgrf + from .. 
import config from ..utils.misc import check_deps from ..workflows.base import init_fmriprep_wf @@ -82,9 +84,7 @@ def build_workflow(config_file, retval): # Called with reports only if config.execution.reports_only: - build_log.log( - 25, "Running --reports-only on participants %s", ", ".join(subject_list) - ) + build_log.log(25, "Running --reports-only on participants %s", ", ".join(subject_list)) retval["return_code"] = generate_reports( subject_list, fmriprep_dir, @@ -104,14 +104,10 @@ def build_workflow(config_file, retval): ] if config.execution.anat_derivatives: - init_msg += [ - f"Anatomical derivatives: {config.execution.anat_derivatives}." - ] + init_msg += [f"Anatomical derivatives: {config.execution.anat_derivatives}."] if config.execution.fs_subjects_dir: - init_msg += [ - f"Pre-run FreeSurfer's SUBJECTS_DIR: {config.execution.fs_subjects_dir}." - ] + init_msg += [f"Pre-run FreeSurfer's SUBJECTS_DIR: {config.execution.fs_subjects_dir}."] build_log.log(25, f"\n{' ' * 11}* ".join(init_msg)) @@ -120,17 +116,22 @@ def build_workflow(config_file, retval): # Check for FS license after building the workflow if not check_valid_fs_license(): from ..utils.misc import fips_enabled + if fips_enabled(): - build_log.critical("""\ + build_log.critical( + """\ ERROR: Federal Information Processing Standard (FIPS) mode is enabled on your system. \ FreeSurfer (and thus fMRIPrep) cannot be used in FIPS mode. \ -Contact your system administrator for assistance.""") +Contact your system administrator for assistance.""" + ) else: - build_log.critical("""\ + build_log.critical( + """\ ERROR: a valid license file is required for FreeSurfer to run. fMRIPrep looked for an existing \ license file at several paths, in this order: 1) command line argument ``--fs-license-file``; \ 2) ``$FS_LICENSE`` environment variable; and 3) the ``$FREESURFER_HOME/license.txt`` path. Get it \ -(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""") +(for free) by registering at https://surfer.nmr.mgh.harvard.edu/registration.html""" + ) retval["return_code"] = 126 # 126 == Command invoked cannot execute. return retval @@ -139,9 +140,7 @@ def build_workflow(config_file, retval): if missing: build_log.critical( "Cannot run fMRIPrep. Missing dependencies:%s", - "\n\t* ".join( - [""] + [f"{cmd} (Interface: {iface})" for iface, cmd in missing] - ), + "\n\t* ".join([""] + [f"{cmd} (Interface: {iface})" for iface, cmd in missing]), ) retval["return_code"] = 127 # 127 == command not found. return retval @@ -180,7 +179,8 @@ def build_boilerplate(config_file, workflow): if not config.execution.md_only_boilerplate and citation_files["md"].exists(): from pathlib import Path - from subprocess import check_call, CalledProcessError, TimeoutExpired + from subprocess import CalledProcessError, TimeoutExpired, check_call + from pkg_resources import resource_filename as pkgrf bib_text = Path(pkgrf("fmriprep", "data/boilerplate.bib")).read_text() @@ -202,15 +202,11 @@ def build_boilerplate(config_file, workflow): str(citation_files["html"]), ] - config.loggers.cli.info( - "Generating an HTML version of the citation boilerplate..." 
- ) + config.loggers.cli.info("Generating an HTML version of the citation boilerplate...") try: check_call(cmd, timeout=10) except (FileNotFoundError, CalledProcessError, TimeoutExpired): - config.loggers.cli.warning( - "Could not generate CITATION.html file:\n%s", " ".join(cmd) - ) + config.loggers.cli.warning("Could not generate CITATION.html file:\n%s", " ".join(cmd)) # Generate LaTex file resolving citations cmd = [ @@ -223,12 +219,8 @@ def build_boilerplate(config_file, workflow): "-o", str(citation_files["tex"]), ] - config.loggers.cli.info( - "Generating a LaTeX version of the citation boilerplate..." - ) + config.loggers.cli.info("Generating a LaTeX version of the citation boilerplate...") try: check_call(cmd, timeout=10) except (FileNotFoundError, CalledProcessError, TimeoutExpired): - config.loggers.cli.warning( - "Could not generate CITATION.tex file:\n%s", " ".join(cmd) - ) + config.loggers.cli.warning("Could not generate CITATION.tex file:\n%s", " ".join(cmd)) diff --git a/fmriprep/config.py b/fmriprep/config.py index e17ab61d1..5cdc4e973 100644 --- a/fmriprep/config.py +++ b/fmriprep/config.py @@ -91,9 +91,7 @@ from multiprocessing import set_start_method # Disable NiPype etelemetry always -_disable_et = bool( - os.getenv("NO_ET") is not None or os.getenv("NIPYPE_NO_ET") is not None -) +_disable_et = bool(os.getenv("NO_ET") is not None or os.getenv("NIPYPE_NO_ET") is not None) os.environ["NIPYPE_NO_ET"] = "1" os.environ["NO_ET"] = "1" @@ -106,14 +104,15 @@ finally: # Defer all custom import for after initializing the forkserver and # ignoring the most annoying warnings - import sys import random - from uuid import uuid4 + import sys + from pathlib import Path from time import strftime + from uuid import uuid4 - from pathlib import Path from nipype import __version__ as _nipype_ver from templateflow import __version__ as _tf_ver + from . 
import __version__ if not hasattr(sys, "_is_pytest_session"): @@ -146,7 +145,9 @@ if not _disable_et: # Just get so analytics track one hit from contextlib import suppress - from requests import get as _get_url, ConnectionError, ReadTimeout + + from requests import ConnectionError, ReadTimeout + from requests import get as _get_url with suppress((ConnectionError, ReadTimeout)): _get_url("https://rig.mit.edu/et/projects/nipy/nipype", timeout=0.05) @@ -171,15 +172,13 @@ del _fs_home _templateflow_home = Path( - os.getenv( - "TEMPLATEFLOW_HOME", os.path.join(os.getenv("HOME"), ".cache", "templateflow") - ) + os.getenv("TEMPLATEFLOW_HOME", os.path.join(os.getenv("HOME"), ".cache", "templateflow")) ) try: from psutil import virtual_memory - _free_mem_at_start = round(virtual_memory().free / 1024 ** 3, 1) + _free_mem_at_start = round(virtual_memory().free / 1024**3, 1) except Exception: _free_mem_at_start = None @@ -196,13 +195,8 @@ _proc_oc_kbytes = Path("/proc/sys/vm/overcommit_kbytes") if _proc_oc_kbytes.exists(): _oc_limit = _proc_oc_kbytes.read_text().strip() - if ( - _oc_limit in ("0", "n/a") - and Path("/proc/sys/vm/overcommit_ratio").exists() - ): - _oc_limit = "{}%".format( - Path("/proc/sys/vm/overcommit_ratio").read_text().strip() - ) + if _oc_limit in ("0", "n/a") and Path("/proc/sys/vm/overcommit_ratio").exists(): + _oc_limit = "{}%".format(Path("/proc/sys/vm/overcommit_ratio").read_text().strip()) except Exception: pass @@ -242,7 +236,7 @@ def load(cls, settings, init=True, ignore=None): @classmethod def get(cls): """Return defined settings.""" - from niworkflows.utils.spaces import SpatialReferences, Reference + from niworkflows.utils.spaces import Reference, SpatialReferences out = {} for k, v in cls.__dict__.items(): @@ -364,9 +358,7 @@ def init(cls): ) if cls.omp_nthreads is None: - cls.omp_nthreads = min( - cls.nprocs - 1 if cls.nprocs > 1 else os.cpu_count(), 8 - ) + cls.omp_nthreads = min(cls.nprocs - 1 if cls.nprocs > 1 else os.cpu_count(), 8) class execution(_Config): @@ -456,12 +448,11 @@ def init(cls): if cls._layout is None: import re - from bids.layout.index import BIDSLayoutIndexer + from bids.layout import BIDSLayout + from bids.layout.index import BIDSLayoutIndexer - _db_path = cls.bids_database_dir or ( - cls.work_dir / cls.run_uuid / "bids_db" - ) + _db_path = cls.bids_database_dir or (cls.work_dir / cls.run_uuid / "bids_db") _db_path.mkdir(exist_ok=True, parents=True) # Recommended after PyBIDS 12.1 @@ -492,9 +483,7 @@ def init(cls): # unserialize pybids Query enum values for acq, filters in cls.bids_filters.items(): cls.bids_filters[acq] = { - k: getattr(Query, v[7:-4]) - if not isinstance(v, Query) and "Query" in v - else v + k: getattr(Query, v[7:-4]) if not isinstance(v, Query) and "Query" in v else v for k, v in filters.items() } @@ -655,6 +644,7 @@ def _set_ants_seed(): def _set_numpy_seed(): """NumPy's random seed is independant from Python's `random` module""" import numpy as np + val = random.randint(1, 65536) np.random.seed(val) return val diff --git a/fmriprep/interfaces/confounds.py b/fmriprep/interfaces/confounds.py index d21626bd1..105f48cd8 100644 --- a/fmriprep/interfaces/confounds.py +++ b/fmriprep/interfaces/confounds.py @@ -32,32 +32,43 @@ import os import re import shutil -import numpy as np + import nibabel as nb +import numpy as np import pandas as pd from nipype import logging -from nipype.utils.filemanip import fname_presuffix from nipype.interfaces.base import ( - traits, TraitedSpec, BaseInterfaceInputSpec, File, Directory, isdefined, - 
SimpleInterface, InputMultiObject, OutputMultiObject + BaseInterfaceInputSpec, + Directory, + File, + InputMultiObject, + OutputMultiObject, + SimpleInterface, + TraitedSpec, + isdefined, + traits, ) -from niworkflows.viz.plots import fMRIPlot +from nipype.utils.filemanip import fname_presuffix from niworkflows.utils.timeseries import _cifti_timeseries, _nifti_timeseries +from niworkflows.viz.plots import fMRIPlot LOGGER = logging.getLogger('nipype.interface') class _aCompCorMasksInputSpec(BaseInterfaceInputSpec): in_vfs = InputMultiObject(File(exists=True), desc="Input volume fractions.") - is_aseg = traits.Bool(False, usedefault=True, - desc="Whether the input volume fractions come from FS' aseg.") - bold_zooms = traits.Tuple(traits.Float, traits.Float, traits.Float, mandatory=True, - desc="BOLD series zooms") + is_aseg = traits.Bool( + False, usedefault=True, desc="Whether the input volume fractions come from FS' aseg." + ) + bold_zooms = traits.Tuple( + traits.Float, traits.Float, traits.Float, mandatory=True, desc="BOLD series zooms" + ) class _aCompCorMasksOutputSpec(TraitedSpec): - out_masks = OutputMultiObject(File(exists=True), - desc="CSF, WM and combined masks, respectively") + out_masks = OutputMultiObject( + File(exists=True), desc="CSF, WM and combined masks, respectively" + ) class aCompCorMasks(SimpleInterface): @@ -68,6 +79,7 @@ class aCompCorMasks(SimpleInterface): def _run_interface(self, runtime): from ..utils.confounds import acompcor_masks + self._results["out_masks"] = acompcor_masks( self.inputs.in_vfs, self.inputs.is_aseg, @@ -89,15 +101,14 @@ class FilterDropped(SimpleInterface): Uses the boolean ``retained`` column to identify rows to keep or filter. """ + input_spec = _FilterDroppedInputSpec output_spec = _FilterDroppedOutputSpec def _run_interface(self, runtime): self._results["out_file"] = fname_presuffix( - self.inputs.in_file, - suffix='_filtered', - use_ext=True, - newpath=runtime.cwd) + self.inputs.in_file, suffix='_filtered', use_ext=True, newpath=runtime.cwd + ) metadata = pd.read_csv(self.inputs.in_file, sep='\t') metadata[metadata.retained].to_csv(self._results["out_file"], sep='\t', index=False) @@ -124,6 +135,7 @@ class RenameACompCor(SimpleInterface): Each set of components is renumbered to start at ``?_comp_cor_00``. 
""" + input_spec = _RenameACompCorInputSpec output_spec = _RenameACompCorOutputSpec @@ -138,15 +150,11 @@ def _run_interface(self, runtime): return runtime self._results["components_file"] = fname_presuffix( - self.inputs.components_file, - suffix='_renamed', - use_ext=True, - newpath=runtime.cwd) + self.inputs.components_file, suffix='_renamed', use_ext=True, newpath=runtime.cwd + ) self._results["metadata_file"] = fname_presuffix( - self.inputs.metadata_file, - suffix='_renamed', - use_ext=True, - newpath=runtime.cwd) + self.inputs.metadata_file, suffix='_renamed', use_ext=True, newpath=runtime.cwd + ) all_comp_cor = metadata[metadata["retained"]] @@ -163,10 +171,10 @@ def _run_interface(self, runtime): a_orig = a_comp_cor["component"] a_new = [f"a_comp_cor_{i:02d}" for i in range(len(a_orig))] - (components.rename(columns=dict(zip(c_orig, c_new))) - .rename(columns=dict(zip(w_orig, w_new))) - .rename(columns=dict(zip(a_orig, a_new))) - ).to_csv(self._results["components_file"], sep='\t', index=False) + final_components = components.rename(columns=dict(zip(c_orig, c_new))) + final_components.rename(columns=dict(zip(w_orig, w_new)), inplace=True) + final_components.rename(columns=dict(zip(a_orig, a_new)), inplace=True) + final_components.to_csv(self._results["components_file"], sep='\t', index=False) metadata.loc[c_comp_cor.index, "component"] = c_new metadata.loc[w_comp_cor.index, "component"] = w_new @@ -259,26 +267,23 @@ class ICAConfoundsInputSpec(BaseInterfaceInputSpec): class ICAConfoundsOutputSpec(TraitedSpec): aroma_confounds = traits.Either( - None, - File(exists=True, desc='output confounds file extracted from ICA-AROMA')) + None, File(exists=True, desc='output confounds file extracted from ICA-AROMA') + ) aroma_noise_ics = File(exists=True, desc='ICA-AROMA noise components') melodic_mix = File(exists=True, desc='melodic mix file') aroma_metadata = File(exists=True, desc='tabulated ICA-AROMA metadata') class ICAConfounds(SimpleInterface): - """Extract confounds from ICA-AROMA result directory - """ + """Extract confounds from ICA-AROMA result directory""" + input_spec = ICAConfoundsInputSpec output_spec = ICAConfoundsOutputSpec def _run_interface(self, runtime): - (aroma_confounds, - motion_ics_out, - melodic_mix_out, - aroma_metadata) = _get_ica_confounds(self.inputs.in_directory, - self.inputs.skip_vols, - newpath=runtime.cwd) + (aroma_confounds, motion_ics_out, melodic_mix_out, aroma_metadata) = _get_ica_confounds( + self.inputs.in_directory, self.inputs.skip_vols, newpath=runtime.cwd + ) if self.inputs.err_on_aroma_warn and aroma_confounds is None: raise RuntimeError('ICA-AROMA failed') @@ -291,9 +296,20 @@ def _run_interface(self, runtime): return runtime -def _gather_confounds(signals=None, dvars=None, std_dvars=None, fdisp=None, - rmsd=None, tcompcor=None, acompcor=None, crowncompcor=None, - cos_basis=None, motion=None, aroma=None, newpath=None): +def _gather_confounds( + signals=None, + dvars=None, + std_dvars=None, + fdisp=None, + rmsd=None, + tcompcor=None, + acompcor=None, + crowncompcor=None, + cos_basis=None, + motion=None, + aroma=None, + newpath=None, +): r""" Load confounds from the filenames, concatenate together horizontally and save new file. 
@@ -317,7 +333,7 @@ def _gather_confounds(signals=None, dvars=None, std_dvars=None, fdisp=None, """ def less_breakable(a_string): - ''' hardens the string to different envs (i.e., case insensitive, no whitespace, '#' ''' + '''hardens the string to different envs (i.e., case insensitive, no whitespace, '#' ''' return ''.join(a_string.split()).strip('#') # Taken from https://stackoverflow.com/questions/1175208/ @@ -331,26 +347,25 @@ def _adjust_indices(left_df, right_df): # instead of the end index_diff = len(left_df.index) - len(right_df.index) if index_diff > 0: - right_df.index = range(index_diff, - len(right_df.index) + index_diff) + right_df.index = range(index_diff, len(right_df.index) + index_diff) elif index_diff < 0: - left_df.index = range(-index_diff, - len(left_df.index) - index_diff) + left_df.index = range(-index_diff, len(left_df.index) - index_diff) all_files = [] confounds_list = [] - for confound, name in ((signals, 'Global signals'), - (std_dvars, 'Standardized DVARS'), - (dvars, 'DVARS'), - (fdisp, 'Framewise displacement'), - (rmsd, 'Framewise displacement (RMS)'), - (tcompcor, 'tCompCor'), - (acompcor, 'aCompCor'), - (crowncompcor, 'crownCompCor'), - (cos_basis, 'Cosine basis'), - (motion, 'Motion parameters'), - (aroma, 'ICA-AROMA') - ): + for confound, name in ( + (signals, 'Global signals'), + (std_dvars, 'Standardized DVARS'), + (dvars, 'DVARS'), + (fdisp, 'Framewise displacement'), + (rmsd, 'Framewise displacement (RMS)'), + (tcompcor, 'tCompCor'), + (acompcor, 'aCompCor'), + (crowncompcor, 'crownCompCor'), + (cos_basis, 'Cosine basis'), + (motion, 'Motion parameters'), + (aroma, 'ICA-AROMA'), + ): if confound is not None and isdefined(confound): confounds_list.append(name) if os.path.exists(confound) and os.stat(confound).st_size > 0: @@ -364,8 +379,9 @@ def _adjust_indices(left_df, right_df): # No data, nothing to concat continue for column_name in new.columns: - new.rename(columns={column_name: camel_to_snake(less_breakable(column_name))}, - inplace=True) + new.rename( + columns={column_name: camel_to_snake(less_breakable(column_name))}, inplace=True + ) _adjust_indices(confounds_data, new) confounds_data = pd.concat((confounds_data, new), axis=1) @@ -374,8 +390,7 @@ def _adjust_indices(left_df, right_df): newpath = os.getcwd() combined_out = os.path.join(newpath, 'confounds.tsv') - confounds_data.to_csv(combined_out, sep='\t', index=False, - na_rep='n/a') + confounds_data.to_csv(combined_out, sep='\t', index=False, na_rep='n/a') return combined_out, confounds_list @@ -413,16 +428,12 @@ def _get_ica_confounds(ica_out_dir, skip_vols, newpath=None): # process the metadata so that the IC column entries match the BIDS name of # the regressor aroma_metadata = pd.read_csv(aroma_metadata, sep='\t') - aroma_metadata['IC'] = [ - 'aroma_motion_{}'.format(name) for name in aroma_metadata['IC']] - aroma_metadata.columns = [ - re.sub(r'[ |\-|\/]', '_', c) for c in aroma_metadata.columns] + aroma_metadata['IC'] = ['aroma_motion_{}'.format(name) for name in aroma_metadata['IC']] + aroma_metadata.columns = [re.sub(r'[ |\-|\/]', '_', c) for c in aroma_metadata.columns] # Add variance statistics to metadata - aroma_icstats = pd.read_csv( - aroma_icstats, header=None, sep=' ')[[0, 1]] / 100 - aroma_icstats.columns = [ - 'model_variance_explained', 'total_variance_explained'] + aroma_icstats = pd.read_csv(aroma_icstats, header=None, sep=' ')[[0, 1]] / 100 + aroma_icstats.columns = ['model_variance_explained', 'total_variance_explained'] aroma_metadata = pd.concat([aroma_metadata, 
aroma_icstats], axis=1) aroma_metadata.to_csv(aroma_metadata_out, sep='\t', index=False) @@ -445,9 +456,9 @@ def _get_ica_confounds(ica_out_dir, skip_vols, newpath=None): # add one to motion_ic_indices to match melodic report. aroma_confounds = os.path.join(newpath, "AROMAAggrCompAROMAConfounds.tsv") - pd.DataFrame(aggr_confounds.T, - columns=['aroma_motion_%02d' % (x + 1) for x in motion_ic_indices]).to_csv( - aroma_confounds, sep="\t", index=None) + pd.DataFrame( + aggr_confounds.T, columns=['aroma_motion_%02d' % (x + 1) for x in motion_ic_indices] + ).to_csv(aroma_confounds, sep="\t", index=None) return aroma_confounds, motion_ics_out, melodic_mix_out, aroma_metadata_out @@ -461,12 +472,12 @@ class _FMRISummaryInputSpec(BaseInterfaceInputSpec): str_or_tuple = traits.Either( traits.Str, traits.Tuple(traits.Str, traits.Either(None, traits.Str)), - traits.Tuple(traits.Str, traits.Either(None, traits.Str), traits.Either(None, traits.Str))) + traits.Tuple(traits.Str, traits.Either(None, traits.Str), traits.Either(None, traits.Str)), + ) confounds_list = traits.List( - str_or_tuple, minlen=1, - desc='list of headers to extract from the confounds_file') - tr = traits.Either(None, traits.Float, usedefault=True, - desc='the repetition time') + str_or_tuple, minlen=1, desc='list of headers to extract from the confounds_file' + ) + tr = traits.Either(None, traits.Float, usedefault=True, desc='the repetition time') drop_trs = traits.Int(0, usedefault=True, desc="dummy scans") @@ -478,15 +489,14 @@ class FMRISummary(SimpleInterface): """ Copy the x-form matrices from `hdr_file` to `out_file`. """ + input_spec = _FMRISummaryInputSpec output_spec = _FMRISummaryOutputSpec def _run_interface(self, runtime): self._results['out_file'] = fname_presuffix( - self.inputs.in_nifti, - suffix='_fmriplot.svg', - use_ext=False, - newpath=runtime.cwd) + self.inputs.in_nifti, suffix='_fmriplot.svg', use_ext=False, newpath=runtime.cwd + ) has_cifti = isdefined(self.inputs.in_cifti) @@ -497,25 +507,19 @@ def _run_interface(self, runtime): nb.load(seg_file), remap_rois=False, labels=( - ("WM+CSF", "Edge") if has_cifti else - ("Ctx GM", "dGM", "WM+CSF", "Cb", "Edge") + ("WM+CSF", "Edge") if has_cifti else ("Ctx GM", "dGM", "WM+CSF", "Cb", "Edge") ), ) # Process CIFTI if has_cifti: - cifti_data, cifti_segments = _cifti_timeseries( - nb.load(self.inputs.in_cifti) - ) + cifti_data, cifti_segments = _cifti_timeseries(nb.load(self.inputs.in_cifti)) if seg_file is not None: # Append WM+CSF and Edge masks cifti_length = cifti_data.shape[0] dataset = np.vstack((cifti_data, dataset)) - segments = { - k: np.array(v) + cifti_length - for k, v in segments.items() - } + segments = {k: np.array(v) + cifti_length for k, v in segments.items()} cifti_segments.update(segments) segments = cifti_segments else: @@ -523,8 +527,12 @@ def _run_interface(self, runtime): dataframe = pd.read_csv( self.inputs.confounds_file, - sep="\t", index_col=None, dtype='float32', - na_filter=True, na_values='n/a') + sep="\t", + index_col=None, + dtype='float32', + na_filter=True, + na_values='n/a', + ) headers = [] units = {} diff --git a/fmriprep/interfaces/maths.py b/fmriprep/interfaces/maths.py index 002bb64e8..24231ae59 100644 --- a/fmriprep/interfaces/maths.py +++ b/fmriprep/interfaces/maths.py @@ -1,16 +1,17 @@ import os + import numpy as np -from nipype.interfaces.base import SimpleInterface, TraitedSpec, traits, File +from nipype.interfaces.base import File, SimpleInterface, TraitedSpec, traits from nipype.utils.filemanip import fname_presuffix class 
ClipInputSpec(TraitedSpec): in_file = File(exists=True, mandatory=True, desc="Input imaging file") out_file = File(desc="Output file name") - minimum = traits.Float(-np.inf, usedefault=True, - desc="Values under minimum are set to minimum") - maximum = traits.Float(np.inf, usedefault=True, - desc="Values over maximum are set to maximum") + minimum = traits.Float( + -np.inf, usedefault=True, desc="Values under minimum are set to minimum" + ) + maximum = traits.Float(np.inf, usedefault=True, desc="Values over maximum are set to maximum") class ClipOutputSpec(TraitedSpec): @@ -18,16 +19,18 @@ class ClipOutputSpec(TraitedSpec): class Clip(SimpleInterface): - """ Simple clipping interface that clips values to specified minimum/maximum + """Simple clipping interface that clips values to specified minimum/maximum If no values are outside the bounds, nothing is done and the in_file is passed as the out_file without copying. """ + input_spec = ClipInputSpec output_spec = ClipOutputSpec def _run_interface(self, runtime): import nibabel as nb + img = nb.load(self.inputs.in_file) data = img.get_fdata() @@ -37,8 +40,9 @@ def _run_interface(self, runtime): if np.any((data < self.inputs.minimum) | (data > self.inputs.maximum)): if not out_file: - out_file = fname_presuffix(self.inputs.in_file, suffix="_clipped", - newpath=runtime.cwd) + out_file = fname_presuffix( + self.inputs.in_file, suffix="_clipped", newpath=runtime.cwd + ) np.clip(data, self.inputs.minimum, self.inputs.maximum, out=data) img.__class__(data, img.affine, img.header).to_filename(out_file) elif not out_file: @@ -58,21 +62,21 @@ class Label2MaskOutputSpec(TraitedSpec): class Label2Mask(SimpleInterface): - """ Create mask file for a label from a multi-label segmentation - """ + """Create mask file for a label from a multi-label segmentation""" + input_spec = Label2MaskInputSpec output_spec = Label2MaskOutputSpec def _run_interface(self, runtime): import nibabel as nb + img = nb.load(self.inputs.in_file) mask = np.uint16(img.dataobj) == self.inputs.label_val out_img = img.__class__(mask, img.affine, img.header) out_img.set_data_dtype(np.uint8) - out_file = fname_presuffix(self.inputs.in_file, suffix="_mask", - newpath=runtime.cwd) + out_file = fname_presuffix(self.inputs.in_file, suffix="_mask", newpath=runtime.cwd) out_img.to_filename(out_file) diff --git a/fmriprep/interfaces/multiecho.py b/fmriprep/interfaces/multiecho.py index 27dcdcde6..25e0925f9 100644 --- a/fmriprep/interfaces/multiecho.py +++ b/fmriprep/interfaces/multiecho.py @@ -37,39 +37,42 @@ from nipype import logging from nipype.interfaces.base import ( - traits, TraitedSpec, File, - CommandLine, CommandLineInputSpec) + CommandLine, + CommandLineInputSpec, + File, + TraitedSpec, + traits, +) LOGGER = logging.getLogger('nipype.interface') class T2SMapInputSpec(CommandLineInputSpec): - in_files = traits.List(File(exists=True), - argstr='-d %s', - position=1, - mandatory=True, - minlen=3, - desc='multi-echo BOLD EPIs') - echo_times = traits.List(traits.Float, - argstr='-e %s', - position=2, - mandatory=True, - minlen=3, - desc='echo times') - mask_file = File(argstr='--mask %s', - position=3, - desc='mask file', - exists=True) - fittype = traits.Enum('curvefit', 'loglin', - argstr='--fittype %s', - position=4, - usedefault=True, - desc=('Desired fitting method: ' - '"loglin" means that a linear model is fit ' - 'to the log of the data. 
'
-                                '"curvefit" means that a more computationally '
-                                'demanding monoexponential model is fit '
-                                'to the raw data.'))
+    in_files = traits.List(
+        File(exists=True),
+        argstr='-d %s',
+        position=1,
+        mandatory=True,
+        minlen=3,
+        desc='multi-echo BOLD EPIs',
+    )
+    echo_times = traits.List(
+        traits.Float, argstr='-e %s', position=2, mandatory=True, minlen=3, desc='echo times'
+    )
+    mask_file = File(argstr='--mask %s', position=3, desc='mask file', exists=True)
+    fittype = traits.Enum(
+        'curvefit',
+        'loglin',
+        argstr='--fittype %s',
+        position=4,
+        usedefault=True,
+        desc=(
+            'Desired fitting method: '
+            '"loglin" means that a linear model is fit to the log of the data. '
+            '"curvefit" means that a more computationally demanding '
+            'monoexponential model is fit to the raw data.'
+        ),
+    )
 
 
 class T2SMapOutputSpec(TraitedSpec):
@@ -96,6 +99,7 @@ class T2SMap(CommandLine):
         sub-01_run-01_echo-3_bold.nii.gz -e 13.0 27.0 43.0 --fittype curvefit'
 
     """
+
     _cmd = 't2smap'
     input_spec = T2SMapInputSpec
     output_spec = T2SMapOutputSpec
diff --git a/fmriprep/interfaces/patches.py b/fmriprep/interfaces/patches.py
index 4e297bfae..c9eac5a7a 100644
--- a/fmriprep/interfaces/patches.py
+++ b/fmriprep/interfaces/patches.py
@@ -29,8 +29,8 @@
 from random import randint
 from time import sleep
 
-from numpy.linalg.linalg import LinAlgError
 from nipype.algorithms import confounds as nac
+from numpy.linalg.linalg import LinAlgError
 
 
 class RobustACompCor(nac.ACompCor):
diff --git a/fmriprep/interfaces/reports.py b/fmriprep/interfaces/reports.py
index 6fba840aa..16cd637f3 100644
--- a/fmriprep/interfaces/reports.py
+++ b/fmriprep/interfaces/reports.py
@@ -22,20 +22,26 @@
 #
 """Interfaces to generate reportlets."""
+import logging
 import os
-import time
 import re
-import logging
-
+import time
 from collections import Counter
+
 from nipype.interfaces.base import (
-    traits, TraitedSpec, BaseInterfaceInputSpec,
-    File, Directory, InputMultiObject, Str, isdefined,
-    SimpleInterface)
+    BaseInterfaceInputSpec,
+    Directory,
+    File,
+    InputMultiObject,
+    SimpleInterface,
+    Str,
+    TraitedSpec,
+    isdefined,
+    traits,
+)
 from niworkflows.interfaces.reportlets import base as nrb
 from smriprep.interfaces.freesurfer import ReconAll
-
 LOGGER = logging.getLogger('nipype.interface')
 
 SUBJECT_TEMPLATE = """\
@@ -103,9 +109,10 @@ class SubjectSummaryInputSpec(BaseInterfaceInputSpec):
     t2w = InputMultiObject(File(exists=True), desc='T2w structural images')
     subjects_dir = Directory(desc='FreeSurfer subjects directory')
     subject_id = Str(desc='Subject ID')
-    bold = InputMultiObject(traits.Either(
-        File(exists=True), traits.List(File(exists=True))),
-        desc='BOLD functional series')
+    bold = InputMultiObject(
+        traits.Either(File(exists=True), traits.List(File(exists=True))),
+        desc='BOLD functional series',
+    )
     std_spaces = traits.List(Str, desc='list of standard spaces')
     nstd_spaces = traits.List(Str, desc='list of non-standard spaces')
 
@@ -133,15 +140,18 @@ def _generate_segment(self):
             '(_(?P<task_id>task-[a-zA-Z0-9]+))?'
             '(_(?P<acq_id>acq-[a-zA-Z0-9]+))?'
             '(_(?P<rec_id>rec-[a-zA-Z0-9]+))?'
-            '(_(?P<run_id>run-[a-zA-Z0-9]+))?')
+            '(_(?P<run_id>run-[a-zA-Z0-9]+))?'
+        )
 
         if not isdefined(self.inputs.subjects_dir):
             freesurfer_status = 'Not run'
         else:
-            recon = ReconAll(subjects_dir=self.inputs.subjects_dir,
-                             subject_id='sub-' + self.inputs.subject_id,
-                             T1_files=self.inputs.t1w,
-                             flags='-noskullstrip')
+            recon = ReconAll(
+                subjects_dir=self.inputs.subjects_dir,
+                subject_id='sub-' + self.inputs.subject_id,
+                T1_files=self.inputs.t1w,
+                flags='-noskullstrip',
+            )
             if recon.cmdline.startswith('echo'):
                 freesurfer_status = 'Pre-existing directory'
             else:
@@ -155,16 +165,20 @@ def _generate_segment(self):
         bold_series = self.inputs.bold if isdefined(self.inputs.bold) else []
         bold_series = [s[0] if isinstance(s, list) else s for s in bold_series]
 
-        counts = Counter(BIDS_NAME.search(series).groupdict()['task_id'][5:]
-                         for series in bold_series)
+        counts = Counter(
+            BIDS_NAME.search(series).groupdict()['task_id'][5:] for series in bold_series
+        )
 
         tasks = ''
         if counts:
             header = '\t\t<ul class="elem-desc">'
             footer = '\t\t</ul>'
-            lines = ['\t\t\t<li>Task: {task_id} ({n_runs:d} run{s})</li>'.format(
-                task_id=task_id, n_runs=n_runs, s='' if n_runs == 1 else 's')
-                     for task_id, n_runs in sorted(counts.items())]
+            lines = [
+                '\t\t\t<li>Task: {task_id} ({n_runs:d} run{s})</li>'.format(
+                    task_id=task_id, n_runs=n_runs, s='' if n_runs == 1 else 's'
+                )
+                for task_id, n_runs in sorted(counts.items())
+            ]
             tasks = '\n'.join([header] + lines + [footer])
 
         return SUBJECT_TEMPLATE.format(
@@ -175,24 +189,42 @@ def _generate_segment(self):
             tasks=tasks,
             std_spaces=', '.join(self.inputs.std_spaces),
             nstd_spaces=', '.join(self.inputs.nstd_spaces),
-            freesurfer_status=freesurfer_status)
+            freesurfer_status=freesurfer_status,
+        )
 
 
 class FunctionalSummaryInputSpec(BaseInterfaceInputSpec):
-    slice_timing = traits.Enum(False, True, 'TooShort', usedefault=True,
-                               desc='Slice timing correction used')
-    distortion_correction = traits.Str(desc='Susceptibility distortion correction method',
-                                       mandatory=True)
-    pe_direction = traits.Enum(None, 'i', 'i-', 'j', 'j-', 'k', 'k-', mandatory=True,
-                               desc='Phase-encoding direction detected')
-    registration = traits.Enum('FSL', 'FreeSurfer', mandatory=True,
-                               desc='Functional/anatomical registration method')
+    slice_timing = traits.Enum(
+        False, True, 'TooShort', usedefault=True, desc='Slice timing correction used'
+    )
+    distortion_correction = traits.Str(
+        desc='Susceptibility distortion correction method', mandatory=True
+    )
+    pe_direction = traits.Enum(
+        None,
+        'i',
+        'i-',
+        'j',
+        'j-',
+        'k',
+        'k-',
+        mandatory=True,
+        desc='Phase-encoding direction detected',
+    )
+    registration = traits.Enum(
+        'FSL', 'FreeSurfer', mandatory=True, desc='Functional/anatomical registration method'
+    )
     fallback = traits.Bool(desc='Boundary-based registration rejected')
-    registration_dof = traits.Enum(6, 9, 12, desc='Registration degrees of freedom',
-                                   mandatory=True)
-    registration_init = traits.Enum('register', 'header', mandatory=True,
-                                    desc='Whether to initialize registration with the "header"'
-                                    ' or by centering the volumes ("register")')
+    registration_dof = traits.Enum(
+        6, 9, 12, desc='Registration degrees of freedom', mandatory=True
+    )
+    registration_init = traits.Enum(
+        'register',
+        'header',
+        mandatory=True,
+        desc='Whether to initialize registration with the "header"'
+        ' or by centering the volumes ("register")',
+    )
     confounds_file = File(exists=True, desc='Confounds file')
     tr = traits.Float(desc='Repetition time', mandatory=True)
     dummy_scans = traits.Either(traits.Int(), None, desc='number of dummy scans specified by user')
@@ -206,19 +238,23 @@ class FunctionalSummary(SummaryInterface):
 
     def _generate_segment(self):
         dof = self.inputs.registration_dof
-        stc = {True: 'Applied',
-               False: 'Not applied',
-               'TooShort': 'Skipped (too few volumes)'}[self.inputs.slice_timing]
+        stc = {
+            True: 'Applied',
+            False: 'Not applied',
+            'TooShort': 'Skipped (too few volumes)',
+        }[self.inputs.slice_timing]
         # #TODO: Add a note about registration_init below?
reg = { 'FSL': [ 'FSL flirt with boundary-based registration' ' (BBR) metric - %d dof' % dof, - 'FSL flirt rigid registration - 6 dof'], + 'FSL flirt rigid registration - 6 dof', + ], 'FreeSurfer': [ 'FreeSurfer bbregister ' '(boundary-based registration, BBR) - %d dof' % dof, - 'FreeSurfer mri_coreg - %d dof' % dof], + 'FreeSurfer mri_coreg - %d dof' % dof, + ], }[self.inputs.registration][self.inputs.fallback] pedir = get_world_pedir(self.inputs.orientation, self.inputs.pe_direction) @@ -229,17 +265,15 @@ def _generate_segment(self): dummy_scan_tmp = "{n_dum}" if self.inputs.dummy_scans == self.inputs.algo_dummy_scans: - dummy_scan_msg = ( - ' '.join([dummy_scan_tmp, "(Confirmed: {n_alg} automatically detected)"]) - .format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) - ) + dummy_scan_msg = ' '.join( + [dummy_scan_tmp, "(Confirmed: {n_alg} automatically detected)"] + ).format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) # the number of dummy scans was specified by the user and # it is not equal to the number detected by the algorithm elif self.inputs.dummy_scans is not None: - dummy_scan_msg = ( - ' '.join([dummy_scan_tmp, "(Warning: {n_alg} automatically detected)"]) - .format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) - ) + dummy_scan_msg = ' '.join( + [dummy_scan_tmp, "(Warning: {n_alg} automatically detected)"] + ).format(n_dum=self.inputs.dummy_scans, n_alg=self.inputs.algo_dummy_scans) # the number of dummy scans was not specified by the user else: dummy_scan_msg = dummy_scan_tmp.format(n_dum=self.inputs.algo_dummy_scans) @@ -252,12 +286,19 @@ def _generate_segment(self): "in single-echo mode." ) if n_echos > 2: - multiecho = (f"Multi-echo EPI sequence: {n_echos} echoes.") + multiecho = f"Multi-echo EPI sequence: {n_echos} echoes." 
return FUNCTIONAL_TEMPLATE.format( - pedir=pedir, stc=stc, sdc=self.inputs.distortion_correction, registration=reg, - confounds=re.sub(r'[\t ]+', ', ', conflist), tr=self.inputs.tr, - dummy_scan_desc=dummy_scan_msg, multiecho=multiecho, ornt=self.inputs.orientation) + pedir=pedir, + stc=stc, + sdc=self.inputs.distortion_correction, + registration=reg, + confounds=re.sub(r'[\t ]+', ', ', conflist), + tr=self.inputs.tr, + dummy_scan_desc=dummy_scan_msg, + multiecho=multiecho, + ornt=self.inputs.orientation, + ) class AboutSummaryInputSpec(BaseInterfaceInputSpec): @@ -270,20 +311,18 @@ class AboutSummary(SummaryInterface): input_spec = AboutSummaryInputSpec def _generate_segment(self): - return ABOUT_TEMPLATE.format(version=self.inputs.version, - command=self.inputs.command, - date=time.strftime("%Y-%m-%d %H:%M:%S %z")) + return ABOUT_TEMPLATE.format( + version=self.inputs.version, + command=self.inputs.command, + date=time.strftime("%Y-%m-%d %H:%M:%S %z"), + ) class LabeledHistogramInputSpec(nrb._SVGReportCapableInputSpec): - in_file = traits.File( - exists=True, - mandatory=True, - desc="Image containing values to plot" - ) + in_file = traits.File(exists=True, mandatory=True, desc="Image containing values to plot") label_file = traits.File( exists=True, - desc="Mask or label image where non-zero values will be used to extract data from in_file" + desc="Mask or label image where non-zero values will be used to extract data from in_file", ) mapping = traits.Dict(desc="Map integer label values onto names of voxels") xlabel = traits.Str("voxels", usedefault=True, desc="Description of values plotted") @@ -293,11 +332,11 @@ class LabeledHistogram(nrb.ReportingInterface): input_spec = LabeledHistogramInputSpec def _generate_report(self): - import numpy as np import nibabel as nb - from nilearn.image import resample_to_img - from matplotlib import pyplot as plt + import numpy as np import seaborn as sns + from matplotlib import pyplot as plt + from nilearn.image import resample_to_img report_file = self._out_report img = nb.load(self.inputs.in_file) @@ -314,10 +353,7 @@ def _generate_report(self): uniq_labels = np.unique(labels[labels > 0]) label_map = self.inputs.mapping or {label: label for label in uniq_labels} - rois = { - label_map.get(label, label): data[labels == label] - for label in label_map - } + rois = {label_map.get(label, label): data[labels == label] for label in label_map} with sns.axes_style('whitegrid'): fig = sns.histplot(rois, bins=50) fig.set_xlabel(self.inputs.xlabel) @@ -327,11 +363,7 @@ def _generate_report(self): def get_world_pedir(ornt, pe_direction): """Return world direction of phase encoding""" - axes = ( - ("Right", "Left"), - ("Anterior", "Posterior"), - ("Superior", "Inferior") - ) + axes = (("Right", "Left"), ("Anterior", "Posterior"), ("Superior", "Inferior")) ax_idcs = {"i": 0, "j": 1, "k": 2} if pe_direction is not None: diff --git a/fmriprep/interfaces/tests/conftest.py b/fmriprep/interfaces/tests/conftest.py index 534ab8e39..4f0821826 100644 --- a/fmriprep/interfaces/tests/conftest.py +++ b/fmriprep/interfaces/tests/conftest.py @@ -1,6 +1,7 @@ -import pytest from pathlib import Path +import pytest + @pytest.fixture(scope="module") def data_dir(): diff --git a/fmriprep/interfaces/tests/test_confounds.py b/fmriprep/interfaces/tests/test_confounds.py index f87642463..b32cf278e 100644 --- a/fmriprep/interfaces/tests/test_confounds.py +++ b/fmriprep/interfaces/tests/test_confounds.py @@ -1,11 +1,12 @@ +from pathlib import Path + from nipype.pipeline import engine 
diff --git a/fmriprep/interfaces/tests/conftest.py b/fmriprep/interfaces/tests/conftest.py
index 534ab8e39..4f0821826 100644
--- a/fmriprep/interfaces/tests/conftest.py
+++ b/fmriprep/interfaces/tests/conftest.py
@@ -1,6 +1,7 @@
-import pytest
 from pathlib import Path
 
+import pytest
+
 
 @pytest.fixture(scope="module")
 def data_dir():
diff --git a/fmriprep/interfaces/tests/test_confounds.py b/fmriprep/interfaces/tests/test_confounds.py
index f87642463..b32cf278e 100644
--- a/fmriprep/interfaces/tests/test_confounds.py
+++ b/fmriprep/interfaces/tests/test_confounds.py
@@ -1,11 +1,12 @@
+from pathlib import Path
+
 from nipype.pipeline import engine as pe
+
 from fmriprep.interfaces import confounds
-from pathlib import Path
 
 
 def test_RenameACompCor(tmp_path, data_dir):
-    renamer = pe.Node(confounds.RenameACompCor(), name="renamer",
-                      base_dir=str(tmp_path))
+    renamer = pe.Node(confounds.RenameACompCor(), name="renamer", base_dir=str(tmp_path))
     renamer.inputs.components_file = data_dir / "acompcor_truncated.tsv"
     renamer.inputs.metadata_file = data_dir / "component_metadata_truncated.tsv"
diff --git a/fmriprep/interfaces/tests/test_maths.py b/fmriprep/interfaces/tests/test_maths.py
index d0ca703b2..3fa98a9e0 100644
--- a/fmriprep/interfaces/tests/test_maths.py
+++ b/fmriprep/interfaces/tests/test_maths.py
@@ -1,12 +1,13 @@
 import nibabel as nb
 import numpy as np
 from nipype.pipeline import engine as pe
+
 from fmriprep.interfaces.maths import Clip
 
 
 def test_Clip(tmp_path):
     in_file = str(tmp_path / "input.nii")
-    data = np.array([[[-1., 1.], [-2., 2.]]])
+    data = np.array([[[-1.0, 1.0], [-2.0, 2.0]]])
     nb.Nifti1Image(data, np.eye(4)).to_filename(in_file)
 
     threshold = pe.Node(Clip(in_file=in_file, minimum=0), name="threshold", base_dir=tmp_path)
@@ -15,37 +16,28 @@ def test_Clip(tmp_path):
     assert ret.outputs.out_file == str(tmp_path / "threshold/input_clipped.nii")
     out_img = nb.load(ret.outputs.out_file)
-    assert np.allclose(out_img.get_fdata(), [[[0., 1.], [0., 2.]]])
+    assert np.allclose(out_img.get_fdata(), [[[0.0, 1.0], [0.0, 2.0]]])
 
-    threshold2 = pe.Node(
-        Clip(in_file=in_file, minimum=-3),
-        name="threshold2",
-        base_dir=tmp_path)
+    threshold2 = pe.Node(Clip(in_file=in_file, minimum=-3), name="threshold2", base_dir=tmp_path)
     ret = threshold2.run()
     assert ret.outputs.out_file == in_file
     out_img = nb.load(ret.outputs.out_file)
-    assert np.allclose(out_img.get_fdata(), [[[-1., 1.], [-2., 2.]]])
+    assert np.allclose(out_img.get_fdata(), [[[-1.0, 1.0], [-2.0, 2.0]]])
 
-    clip = pe.Node(
-        Clip(in_file=in_file, minimum=-1, maximum=1),
-        name="clip",
-        base_dir=tmp_path)
+    clip = pe.Node(Clip(in_file=in_file, minimum=-1, maximum=1), name="clip", base_dir=tmp_path)
     ret = clip.run()
     assert ret.outputs.out_file == str(tmp_path / "clip/input_clipped.nii")
     out_img = nb.load(ret.outputs.out_file)
-    assert np.allclose(out_img.get_fdata(), [[[-1., 1.], [-1., 1.]]])
+    assert np.allclose(out_img.get_fdata(), [[[-1.0, 1.0], [-1.0, 1.0]]])
 
-    nonpositive = pe.Node(
-        Clip(in_file=in_file, maximum=0),
-        name="nonpositive",
-        base_dir=tmp_path)
+    nonpositive = pe.Node(Clip(in_file=in_file, maximum=0), name="nonpositive", base_dir=tmp_path)
     ret = nonpositive.run()
     assert ret.outputs.out_file == str(tmp_path / "nonpositive/input_clipped.nii")
     out_img = nb.load(ret.outputs.out_file)
-    assert np.allclose(out_img.get_fdata(), [[[-1., 0.], [-2., 0.]]])
+    assert np.allclose(out_img.get_fdata(), [[[-1.0, 0.0], [-2.0, 0.0]]])
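The `Clip` interface itself is not touched by this changeset, but the assertions above pin down its numeric contract; the same bounds expressed in plain NumPy (the one behavior this sketch cannot show is `Clip` returning the input path untouched when no voxel is out of range, as in the `minimum=-3` case):

import numpy as np

data = np.array([[[-1.0, 1.0], [-2.0, 2.0]]])

# minimum=0: negative voxels are floored at zero
assert np.allclose(np.clip(data, 0, None), [[[0.0, 1.0], [0.0, 2.0]]])
# minimum=-1, maximum=1: both tails are bounded
assert np.allclose(np.clip(data, -1, 1), [[[-1.0, 1.0], [-1.0, 1.0]]])
# maximum=0: positive voxels are capped at zero
assert np.allclose(np.clip(data, None, 0), [[[-1.0, 0.0], [-2.0, 0.0]]])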
diff --git a/fmriprep/interfaces/tests/test_reports.py b/fmriprep/interfaces/tests/test_reports.py
index e3fb1af9e..d316b332b 100644
--- a/fmriprep/interfaces/tests/test_reports.py
+++ b/fmriprep/interfaces/tests/test_reports.py
@@ -25,25 +25,28 @@
 from ..reports import get_world_pedir
 
 
-@pytest.mark.parametrize("orientation,pe_dir,expected", [
-    ('RAS', 'j', 'Posterior-Anterior'),
-    ('RAS', 'j-', 'Anterior-Posterior'),
-    ('RAS', 'i', 'Left-Right'),
-    ('RAS', 'i-', 'Right-Left'),
-    ('RAS', 'k', 'Inferior-Superior'),
-    ('RAS', 'k-', 'Superior-Inferior'),
-    ('LAS', 'j', 'Posterior-Anterior'),
-    ('LAS', 'i-', 'Left-Right'),
-    ('LAS', 'k-', 'Superior-Inferior'),
-    ('LPI', 'j', 'Anterior-Posterior'),
-    ('LPI', 'i-', 'Left-Right'),
-    ('LPI', 'k-', 'Inferior-Superior'),
-    ('SLP', 'k-', 'Posterior-Anterior'),
-    ('SLP', 'k', 'Anterior-Posterior'),
-    ('SLP', 'j-', 'Left-Right'),
-    ('SLP', 'j', 'Right-Left'),
-    ('SLP', 'i', 'Inferior-Superior'),
-    ('SLP', 'i-', 'Superior-Inferior'),
-])
+@pytest.mark.parametrize(
+    "orientation,pe_dir,expected",
+    [
+        ('RAS', 'j', 'Posterior-Anterior'),
+        ('RAS', 'j-', 'Anterior-Posterior'),
+        ('RAS', 'i', 'Left-Right'),
+        ('RAS', 'i-', 'Right-Left'),
+        ('RAS', 'k', 'Inferior-Superior'),
+        ('RAS', 'k-', 'Superior-Inferior'),
+        ('LAS', 'j', 'Posterior-Anterior'),
+        ('LAS', 'i-', 'Left-Right'),
+        ('LAS', 'k-', 'Superior-Inferior'),
+        ('LPI', 'j', 'Anterior-Posterior'),
+        ('LPI', 'i-', 'Left-Right'),
+        ('LPI', 'k-', 'Inferior-Superior'),
+        ('SLP', 'k-', 'Posterior-Anterior'),
+        ('SLP', 'k', 'Anterior-Posterior'),
+        ('SLP', 'j-', 'Left-Right'),
+        ('SLP', 'j', 'Right-Left'),
+        ('SLP', 'i', 'Inferior-Superior'),
+        ('SLP', 'i-', 'Superior-Inferior'),
+    ],
+)
 def test_get_world_pedir(tmpdir, orientation, pe_dir, expected):
     assert get_world_pedir(orientation, pe_dir) == expected
diff --git a/fmriprep/reports/conftest.py b/fmriprep/reports/conftest.py
index 08592849c..4abc68aac 100644
--- a/fmriprep/reports/conftest.py
+++ b/fmriprep/reports/conftest.py
@@ -22,9 +22,10 @@
 #
 """py.test configuration"""
 import os
+import tempfile
 from pathlib import Path
+
 import pytest
-import tempfile
 
 
 @pytest.fixture(autouse=True)
diff --git a/fmriprep/reports/core.py b/fmriprep/reports/core.py
index 4daf7452c..610940c8f 100644
--- a/fmriprep/reports/core.py
+++ b/fmriprep/reports/core.py
@@ -21,6 +21,7 @@
 # https://www.nipreps.org/community/licensing/
 #
 from pathlib import Path
+
 from niworkflows.reports.core import Report as _Report
 
 # This patch is intended to permit fMRIPrep 20.2.0 LTS to use the YODA-style
@@ -51,6 +52,7 @@ def _load_config(self, config):
 # The following are the interface used directly by fMRIPrep
 #
 
+
 def run_reports(
     out_dir,
     subject_label,
@@ -120,9 +122,7 @@ def generate_reports(
     logger = logging.getLogger("cli")
     error_list = ", ".join(
-        "%s (%d)" % (subid, err)
-        for subid, err in zip(subject_list, report_errors)
-        if err
+        f"{subid} ({err})" for subid, err in zip(subject_list, report_errors) if err
     )
     logger.error(
         "Preprocessing did not finish successfully. Errors occurred while processing "
diff --git a/fmriprep/tests/test_config.py b/fmriprep/tests/test_config.py
index e2bc1d08d..0d226bc83 100644
--- a/fmriprep/tests/test_config.py
+++ b/fmriprep/tests/test_config.py
@@ -23,12 +23,12 @@
 """Check the configuration module and file."""
 import os
 from pathlib import Path
-from pkg_resources import resource_filename as pkgrf
 from unittest.mock import patch
 
 import pytest
-from toml import loads
 from niworkflows.utils.spaces import format_reference
+from pkg_resources import resource_filename as pkgrf
+from toml import loads
 
 from .. import config
@@ -41,6 +41,7 @@ def _reset_config():
     `importlib.reload` creates new sets of objects, but will not remove
     previous references to those objects."""
     import importlib
+
     importlib.reload(config)
@@ -69,24 +70,24 @@ def test_config_spaces():
     config.init_spaces()
     spaces = config.workflow.spaces
-    assert "MNI152NLin6Asym:res-2" not in [
-        str(s) for s in spaces.get_standard(full_spec=True)]
+    assert "MNI152NLin6Asym:res-2" not in [str(s) for s in spaces.get_standard(full_spec=True)]
 
     assert "MNI152NLin6Asym_res-2" not in [
         format_reference((s.fullname, s.spec))
-        for s in spaces.references if s.standard and s.dim == 3
+        for s in spaces.references
+        if s.standard and s.dim == 3
     ]
 
     config.workflow.use_aroma = True
     config.init_spaces()
     spaces = config.workflow.spaces
-    assert "MNI152NLin6Asym:res-2" in [
-        str(s) for s in spaces.get_standard(full_spec=True)]
+    assert "MNI152NLin6Asym:res-2" in [str(s) for s in spaces.get_standard(full_spec=True)]
 
     assert "MNI152NLin6Asym_res-2" in [
         format_reference((s.fullname, s.spec))
-        for s in spaces.references if s.standard and s.dim == 3
+        for s in spaces.references
+        if s.standard and s.dim == 3
     ]
 
     config.execution.output_spaces = None
@@ -98,14 +99,15 @@ def test_config_spaces():
 
     assert [
         format_reference((s.fullname, s.spec))
-        for s in spaces.references if s.standard and s.dim == 3
+        for s in spaces.references
+        if s.standard and s.dim == 3
     ] == ['MNI152NLin2009cAsym']
 
     _reset_config()
 
 
-@pytest.mark.parametrize("master_seed,ants_seed,numpy_seed", [
-    (1, 17612, 8272), (100, 19094, 60232)
-])
+@pytest.mark.parametrize(
+    "master_seed,ants_seed,numpy_seed", [(1, 17612, 8272), (100, 19094, 60232)]
+)
 def test_prng_seed(master_seed, ants_seed, numpy_seed):
     """Ensure seeds are properly tracked"""
     seeds = config.seeds
diff --git a/fmriprep/tests/test_fsl6.py b/fmriprep/tests/test_fsl6.py
index 987452fd1..6ba4e471d 100644
--- a/fmriprep/tests/test_fsl6.py
+++ b/fmriprep/tests/test_fsl6.py
@@ -1,11 +1,10 @@
-from packaging.version import LegacyVersion
-from pathlib import Path
 import shutil
+from pathlib import Path
 
-from nipype.interfaces import fsl
 import pytest
 import templateflow.api as tf
-
+from nipype.interfaces import fsl
+from packaging.version import LegacyVersion
 
 fslversion = fsl.Info.version()
 TEMPLATE = tf.get("MNI152NLin2009cAsym", resolution=2, desc=None, suffix="T1w")
@@ -13,18 +12,21 @@
 @pytest.mark.skipif(fslversion is None, reason="fsl required")
 @pytest.mark.skipif(LegacyVersion(fslversion) < LegacyVersion("6.0.0"), reason="FSL6 test")
-@pytest.mark.parametrize("path_parent,filename", [
-    (".", "brain.nii.gz"),
-    (
-        "pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification",
-        "supercalifragilisticexpialidocious.nii.gz",
-    ),
-    (
-        "pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification/"
-        "antidisestablishmentarianism/pseudopseudohypoparathyroidism/sesquipedalian",
-        "brain.nii.gz"
-    )
-])
+@pytest.mark.parametrize(
+    "path_parent,filename",
+    [
+        (".", "brain.nii.gz"),
+        (
+            "pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification",
+            "supercalifragilisticexpialidocious.nii.gz",
+        ),
+        (
+            "pneumonoultramicroscopicsilicovolcanoconiosis/floccinaucinihilipilification/"
+            "antidisestablishmentarianism/pseudopseudohypoparathyroidism/sesquipedalian",
+            "brain.nii.gz",
+        ),
+    ],
+)
 def test_fsl6_long_filenames(tmp_path, path_parent, filename):
     test_dir = tmp_path / path_parent
     test_dir.mkdir(parents=True, exist_ok=True)
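For reference, the reload pattern that `_reset_config` wraps, in isolation — a sketch only, relying on the fact (visible in `test_config_spaces` above) that `fmriprep.config` keeps its settings as module-level state which tests mutate:

import importlib

from fmriprep import config

config.workflow.use_aroma = True  # a test mutates module-level state...
importlib.reload(config)          # ...then re-executes the module to restore defaults

# Caveat from the docstring above: reload creates new objects in place, but
# anything still holding a reference to the old objects will not see the reset.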
diff --git a/fmriprep/utils/bids.py b/fmriprep/utils/bids.py
index c67bf5638..7d570c5fb 100644
--- a/fmriprep/utils/bids.py
+++ b/fmriprep/utils/bids.py
@@ -21,20 +21,25 @@
 # https://www.nipreps.org/community/licensing/
 #
 """Utilities to handle BIDS inputs."""
+import json
 import os
 import sys
-import json
 from pathlib import Path
 
 
 def write_bidsignore(deriv_dir):
     bids_ignore = (
-        "*.html", "logs/", "figures/",  # Reports
+        "*.html",
+        "logs/",
+        "figures/",  # Reports
         "*_xfm.*",  # Unspecified transform files
         "*.surf.gii",  # Unspecified structural outputs
         # Unspecified functional outputs
-        "*_boldref.nii.gz", "*_bold.func.gii",
-        "*_mixing.tsv", "*_AROMAnoiseICs.csv", "*_timeseries.tsv",
+        "*_boldref.nii.gz",
+        "*_bold.func.gii",
+        "*_mixing.tsv",
+        "*_AROMAnoiseICs.csv",
+        "*_timeseries.tsv",
     )
     ignore_file = Path(deriv_dir) / ".bidsignore"
@@ -42,7 +47,7 @@ def write_bidsignore(deriv_dir):
 
 
 def write_derivative_description(bids_dir, deriv_dir):
-    from ..__about__ import __version__, DOWNLOAD_URL
+    from ..__about__ import DOWNLOAD_URL, __version__
 
     bids_dir = Path(bids_dir)
     deriv_dir = Path(deriv_dir)
@@ -50,27 +55,28 @@ def write_derivative_description(bids_dir, deriv_dir):
         'Name': 'fMRIPrep - fMRI PREProcessing workflow',
         'BIDSVersion': '1.4.0',
         'DatasetType': 'derivative',
-        'GeneratedBy': [{
-            'Name': 'fMRIPrep',
-            'Version': __version__,
-            'CodeURL': DOWNLOAD_URL,
-        }],
-        'HowToAcknowledge':
-            'Please cite our paper (https://doi.org/10.1038/s41592-018-0235-4), '
-            'and include the generated citation boilerplate within the Methods '
-            'section of the text.',
+        'GeneratedBy': [
+            {
+                'Name': 'fMRIPrep',
+                'Version': __version__,
+                'CodeURL': DOWNLOAD_URL,
+            }
+        ],
+        'HowToAcknowledge': 'Please cite our paper (https://doi.org/10.1038/s41592-018-0235-4), '
+        'and include the generated citation boilerplate within the Methods '
+        'section of the text.',
     }
 
     # Keys that can only be set by environment
     if 'FMRIPREP_DOCKER_TAG' in os.environ:
         desc['GeneratedBy'][0]['Container'] = {
             "Type": "docker",
-            "Tag": f"nipreps/fmriprep:{os.environ['FMRIPREP_DOCKER_TAG']}"
+            "Tag": f"nipreps/fmriprep:{os.environ['FMRIPREP_DOCKER_TAG']}",
         }
     if 'FMRIPREP_SINGULARITY_URL' in os.environ:
         desc['GeneratedBy'][0]['Container'] = {
             "Type": "singularity",
-            "URI": os.getenv('FMRIPREP_SINGULARITY_URL')
+            "URI": os.getenv('FMRIPREP_SINGULARITY_URL'),
         }
 
     # Keys deriving from source dataset
@@ -80,10 +86,9 @@ def write_derivative_description(bids_dir, deriv_dir):
         orig_desc = json.loads(fname.read_text())
 
     if 'DatasetDOI' in orig_desc:
-        desc['SourceDatasets'] = [{
-            'URL': f'https://doi.org/{orig_desc["DatasetDOI"]}',
-            'DOI': orig_desc['DatasetDOI']
-        }]
+        desc['SourceDatasets'] = [
+            {'URL': f'https://doi.org/{orig_desc["DatasetDOI"]}', 'DOI': orig_desc['DatasetDOI']}
+        ]
     if 'License' in orig_desc:
         desc['License'] = orig_desc['License']
@@ -92,8 +97,9 @@
 
 def validate_input_dir(exec_env, bids_dir, participant_label):
     # Ignore issues and warnings that should not influence FMRIPREP
-    import tempfile
     import subprocess
+    import tempfile
+
     validator_config_dict = {
         "ignore": [
             "EVENTS_COLUMN_ONSET",
@@ -137,31 +143,36 @@ def validate_input_dir(exec_env, bids_dir, participant_label):
             "MALFORMED_BVEC",
             "MALFORMED_BVAL",
             "MISSING_TSV_COLUMN_EEG_ELECTRODES",
-            "MISSING_SESSION"
+            "MISSING_SESSION",
         ],
         "error": ["NO_T1W"],
-        "ignoredFiles": ['/dataset_description.json', '/participants.tsv']
+        "ignoredFiles": ['/dataset_description.json', '/participants.tsv'],
     }
     # Limit validation only to data from requested participants
     if participant_label:
         all_subs = set([s.name[4:] for s in bids_dir.glob('sub-*')])
-        selected_subs = set([s[4:] if s.startswith('sub-') else s
-                             for s in participant_label])
+        selected_subs = set([s[4:] if s.startswith('sub-') else s for s in participant_label])
         bad_labels = selected_subs.difference(all_subs)
         if bad_labels:
-            error_msg = 'Data for requested participant(s) label(s) not found. Could ' \
-                        'not find data for participant(s): %s. Please verify the requested ' \
-                        'participant labels.'
+            error_msg = (
+                'Data for requested participant(s) label(s) not found. Could '
+                'not find data for participant(s): %s. Please verify the requested '
+                'participant labels.'
+            )
             if exec_env == 'docker':
-                error_msg += ' This error can be caused by the input data not being ' \
-                             'accessible inside the docker container. Please make sure all ' \
-                             'volumes are mounted properly (see https://docs.docker.com/' \
-                             'engine/reference/commandline/run/#mount-volume--v---read-only)'
+                error_msg += (
+                    ' This error can be caused by the input data not being '
+                    'accessible inside the docker container. Please make sure all '
+                    'volumes are mounted properly (see https://docs.docker.com/'
+                    'engine/reference/commandline/run/#mount-volume--v---read-only)'
+                )
             if exec_env == 'singularity':
-                error_msg += ' This error can be caused by the input data not being ' \
-                             'accessible inside the singularity container. Please make sure ' \
-                             'all paths are mapped properly (see https://www.sylabs.io/' \
-                             'guides/3.0/user-guide/bind_paths_and_mounts.html)'
+                error_msg += (
+                    ' This error can be caused by the input data not being '
+                    'accessible inside the singularity container. Please make sure '
+                    'all paths are mapped properly (see https://www.sylabs.io/'
+                    'guides/3.0/user-guide/bind_paths_and_mounts.html)'
+                )
             raise RuntimeError(error_msg % ','.join(bad_labels))
 
         ignored_subs = all_subs.difference(selected_subs)
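Putting the branches of `write_derivative_description` together, the dictionary it serializes for a hypothetical containerized run would look as follows (the version and tag values are made up for illustration; the keys and static strings come from the function above):

{
    "Name": "fMRIPrep - fMRI PREProcessing workflow",
    "BIDSVersion": "1.4.0",
    "DatasetType": "derivative",
    "GeneratedBy": [
        {
            "Name": "fMRIPrep",
            "Version": "22.0.0",  # hypothetical __version__
            "CodeURL": "https://github.com/nipreps/fmriprep/archive/22.0.0.tar.gz",
            # only present when FMRIPREP_DOCKER_TAG is set in the environment
            "Container": {"Type": "docker", "Tag": "nipreps/fmriprep:22.0.0"},
        }
    ],
    "HowToAcknowledge": "Please cite our paper (https://doi.org/10.1038/s41592-018-0235-4), "
    "and include the generated citation boilerplate within the Methods "
    "section of the text.",
}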
""" - import numpy as np import nibabel as nb + import numpy as np from scipy.ndimage import gaussian_filter img = nb.load(in_file) @@ -107,8 +107,9 @@ def acompcor_masks(in_files, is_aseg=False, zooms=None): """ from pathlib import Path - import numpy as np + import nibabel as nb + import numpy as np from scipy.ndimage import binary_dilation from skimage.morphology import ball diff --git a/fmriprep/utils/meepi.py b/fmriprep/utils/meepi.py index 00ec68c55..c95429cb2 100644 --- a/fmriprep/utils/meepi.py +++ b/fmriprep/utils/meepi.py @@ -36,7 +36,9 @@ def combine_meepi_source(in_files): """ import os + from nipype.utils.filemanip import filename_to_list + base, in_file = os.path.split(filename_to_list(in_files)[0]) entities = [ent for ent in in_file.split('_') if not ent.startswith('echo-')] basename = '_'.join(entities) diff --git a/fmriprep/utils/misc.py b/fmriprep/utils/misc.py index e088e0e19..7e735119c 100644 --- a/fmriprep/utils/misc.py +++ b/fmriprep/utils/misc.py @@ -26,11 +26,12 @@ def check_deps(workflow): """Make sure dependencies are present in this system.""" from nipype.utils.filemanip import which + return sorted( (node.interface.__class__.__name__, node.interface._cmd) for node in workflow._get_all_nodes() - if (hasattr(node.interface, '_cmd') - and which(node.interface._cmd.split()[0]) is None)) + if (hasattr(node.interface, '_cmd') and which(node.interface._cmd.split()[0]) is None) + ) def fips_enabled(): @@ -41,5 +42,6 @@ def fips_enabled(): https://github.com/nipreps/fmriprep/issues/2480#issuecomment-891199276 """ from pathlib import Path + fips = Path("/proc/sys/crypto/fips_enabled") return fips.exists() and fips.read_text()[0] != "0" diff --git a/fmriprep/utils/sentry.py b/fmriprep/utils/sentry.py index 25e7e5455..48eb6be4c 100644 --- a/fmriprep/utils/sentry.py +++ b/fmriprep/utils/sentry.py @@ -23,8 +23,9 @@ """Stripped out routines for Sentry.""" import os import re -from niworkflows.utils.misc import read_crashfile + from nibabel.optpkg import optional_package +from niworkflows.utils.misc import read_crashfile from .. 
diff --git a/fmriprep/utils/sentry.py b/fmriprep/utils/sentry.py
index 25e7e5455..48eb6be4c 100644
--- a/fmriprep/utils/sentry.py
+++ b/fmriprep/utils/sentry.py
@@ -23,8 +23,9 @@
 """Stripped out routines for Sentry."""
 import os
 import re
-from niworkflows.utils.misc import read_crashfile
+
 from nibabel.optpkg import optional_package
+from niworkflows.utils.misc import read_crashfile
 
 from .. import config
@@ -33,21 +34,14 @@
 CHUNK_SIZE = 16384
 # Group common events with pre specified fingerprints
 KNOWN_ERRORS = {
-    'permission-denied': [
-        "PermissionError: [Errno 13] Permission denied"
-    ],
+    'permission-denied': ["PermissionError: [Errno 13] Permission denied"],
     'memory-error': [
         "MemoryError",
         "Cannot allocate memory",
         "Return code: 134",
     ],
-    'reconall-already-running': [
-        "ERROR: it appears that recon-all is already running"
-    ],
-    'no-disk-space': [
-        "[Errno 28] No space left on device",
-        "[Errno 122] Disk quota exceeded"
-    ],
+    'reconall-already-running': ["ERROR: it appears that recon-all is already running"],
+    'no-disk-space': ["[Errno 28] No space left on device", "[Errno 122] Disk quota exceeded"],
     'segfault': [
         "Segmentation Fault",
         "Segfault",
@@ -66,15 +60,21 @@
 def sentry_setup():
     """Set-up sentry."""
     release = config.environment.version or "dev"
-    environment = "dev" if (
-        os.getenv('FMRIPREP_DEV', '').lower in ('1', 'on', 'yes', 'y', 'true')
-        or ('+' in release)
-    ) else "prod"
-
-    sentry_sdk.init("https://d5a16b0c38d84d1584dfc93b9fb1ade6@sentry.io/1137693",
-                    release=release,
-                    environment=environment,
-                    before_send=before_send)
+    environment = (
+        "dev"
+        if (
+            # note the call parentheses: a bare `.lower` is a bound method and
+            # can never be a member of the tuple
+            os.getenv('FMRIPREP_DEV', '').lower() in ('1', 'on', 'yes', 'y', 'true')
+            or ('+' in release)
+        )
+        else "prod"
+    )
+
+    sentry_sdk.init(
+        "https://d5a16b0c38d84d1584dfc93b9fb1ade6@sentry.io/1137693",
+        release=release,
+        environment=environment,
+        before_send=before_send,
+    )
     with sentry_sdk.configure_scope() as scope:
         for k, v in config.get(flat=True).items():
             scope.set_tag(k, v)
@@ -100,8 +100,7 @@ def process_crashfile(crashfile):
             break
         exception_text_start += 1
 
-    exception_text = '\n'.join(
-        traceback.splitlines()[exception_text_start:])
+    exception_text = '\n'.join(traceback.splitlines()[exception_text_start:])
 
     # Extract inputs, if present
     inputs = crash_info.pop('inputs', None)
@@ -129,7 +128,7 @@ def process_crashfile(crashfile):
             break
 
     message = issue_title + '\n\n'
-    message += exception_text[-(8192 - len(message)):]
+    message += exception_text[-(8192 - len(message)) :]
     if fingerprint:
         sentry_sdk.add_breadcrumb(message=fingerprint, level='fatal')
     else:
@@ -159,8 +158,12 @@ def before_send(event, hints):
         return None
 
     if 'breadcrumbs' in event and isinstance(event['breadcrumbs'], list):
-        fingerprints_to_propagate = ['no-disk-space', 'memory-error', 'permission-denied',
-                                     'keyboard-interrupt']
+        fingerprints_to_propagate = [
+            'no-disk-space',
+            'memory-error',
+            'permission-denied',
+            'keyboard-interrupt',
+        ]
         for bc in event['breadcrumbs']:
             msg = bc.get('message', 'empty-msg')
             if msg in fingerprints_to_propagate:
@@ -178,5 +181,4 @@ def _chunks(string, length=CHUNK_SIZE):
         ['som', 'e l', 'ong', 'er ', 'str', 'ing', '.']
 
     """
-    return (string[i:i + length]
-            for i in range(0, len(string), length))
+    return (string[i : i + length] for i in range(0, len(string), length))
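How the `KNOWN_ERRORS` table is meant to be consumed — the matching loop is elided from the hunks above, so this is an assumed reconstruction of its intent rather than the shipped code:

def match_fingerprint(traceback_text, known_errors):
    """Return the first fingerprint whose marker substring occurs in the traceback."""
    for fingerprint, markers in known_errors.items():
        if any(marker in traceback_text for marker in markers):
            return fingerprint
    return None


known_errors = {"no-disk-space": ["[Errno 28] No space left on device"]}
tb = "OSError: [Errno 28] No space left on device"
assert match_fingerprint(tb, known_errors) == "no-disk-space"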
diff --git a/fmriprep/utils/testing.py b/fmriprep/utils/testing.py
index 9379649f2..dcc551dbc 100644
--- a/fmriprep/utils/testing.py
+++ b/fmriprep/utils/testing.py
@@ -26,33 +26,31 @@
 
 """
 
-import unittest
 import logging
-from networkx.exception import NetworkXUnfeasible
+import unittest
 
-from nipype.pipeline import engine as pe
-from nipype.interfaces.base import isdefined
+from networkx.exception import NetworkXUnfeasible
 from nipype.interfaces import utility as niu
+from nipype.interfaces.base import isdefined
+from nipype.pipeline import engine as pe
 
 logging.disable(logging.INFO)  # <- do we really want to do this?
 
 
 class TestWorkflow(unittest.TestCase):
-    ''' Subclass for test within the workflow module.
+    '''Subclass for test within the workflow module.
 
     invoke tests with ``python -m unittest discover test'''
 
-    def assertIsAlmostExpectedWorkflow(self, expected_name, expected_interfaces,
-                                       expected_inputs, expected_outputs,
-                                       actual):
-        ''' somewhat hacky way to confirm workflows are as expected, but with low confidence '''
+    def assertIsAlmostExpectedWorkflow(
+        self, expected_name, expected_interfaces, expected_inputs, expected_outputs, actual
+    ):
+        '''somewhat hacky way to confirm workflows are as expected, but with low confidence'''
         self.assertIsInstance(actual, pe.Workflow)
         self.assertEqual(expected_name, actual.name)
 
         # assert it has the same nodes
-        actual_nodes = [actual.get_node(name)
-                        for name in actual.list_node_names()]
-        actual_interfaces = [node.interface.__class__.__name__
-                             for node in actual_nodes]
+        actual_nodes = [actual.get_node(name) for name in actual.list_node_names()]
+        actual_interfaces = [node.interface.__class__.__name__ for node in actual_nodes]
 
         # assert lists equal
         self.assertIsSubsetOfList(expected_interfaces, actual_interfaces)
@@ -74,8 +72,7 @@ def get_io_names(pre, ios):
         actual_inputs = []
         actual_outputs = []
-        node_tuples = [(node.name, node.inputs.items(), node.outputs.items())
-                       for node in nodes]
+        node_tuples = [(node.name, node.inputs.items(), node.outputs.items()) for node in nodes]
         for name, inputs, outputs in node_tuples:
             pre = str(name) + "."
             actual_inputs += get_io_names(pre, inputs)
@@ -86,7 +83,7 @@ def get_io_names(pre, ios):
         return actual_inputs, actual_outputs
 
     def assert_circular(self, workflow, circular_connections):
-        ''' check key paths in workflow by specifying some connections that should induce
+        '''check key paths in workflow by specifying some connections that should induce
         circular paths, which trips a NetworkX error.
 
         circular_connections is a list of tuples:
             [('from_node_name', 'to_node_name', ('from_node.output_field','to_node.input_field'))]
diff --git a/fmriprep/workflows/base.py b/fmriprep/workflows/base.py
index f1954fd71..9020691ed 100644
--- a/fmriprep/workflows/base.py
+++ b/fmriprep/workflows/base.py
@@ -29,18 +29,17 @@
 
 """
 
-import sys
 import os
+import sys
 from copy import deepcopy
 
-from packaging.version import Version
-
-from nipype.pipeline import engine as pe
 from nipype.interfaces import utility as niu
+from nipype.pipeline import engine as pe
+from packaging.version import Version
 
 from .. import config
 from ..interfaces import DerivativesDataSink
-from ..interfaces.reports import SubjectSummary, AboutSummary
+from ..interfaces.reports import AboutSummary, SubjectSummary
 from .bold import init_func_preproc_wf
 
@@ -83,7 +82,8 @@ def init_fmriprep_wf():
             minimum_fs_version="7.0.0",
         ),
         name='fsdir_run_%s' % config.execution.run_uuid.replace('-', '_'),
-        run_without_submitting=True)
+        run_without_submitting=True,
+    )
     if config.execution.fs_subjects_dir is not None:
         fsdir.inputs.subjects_dir = str(config.execution.fs_subjects_dir.absolute())
 
@@ -91,20 +91,19 @@ def init_fmriprep_wf():
         single_subject_wf = init_single_subject_wf(subject_id)
 
         single_subject_wf.config['execution']['crashdump_dir'] = str(
-            config.execution.fmriprep_dir / f"sub-{subject_id}"
-            / "log" / config.execution.run_uuid
+            config.execution.fmriprep_dir / f"sub-{subject_id}" / "log" / config.execution.run_uuid
         )
         for node in single_subject_wf._get_all_nodes():
             node.config = deepcopy(single_subject_wf.config)
         if freesurfer:
-            fmriprep_wf.connect(fsdir, 'subjects_dir',
-                                single_subject_wf, 'inputnode.subjects_dir')
+            fmriprep_wf.connect(fsdir, 'subjects_dir', single_subject_wf, 'inputnode.subjects_dir')
         else:
             fmriprep_wf.add_nodes([single_subject_wf])
 
         # Dump a copy of the config file into the log directory
-        log_dir = config.execution.fmriprep_dir / f"sub-{subject_id}" \
-            / 'log' / config.execution.run_uuid
+        log_dir = (
+            config.execution.fmriprep_dir / f"sub-{subject_id}" / 'log' / config.execution.run_uuid
+        )
         log_dir.mkdir(exist_ok=True, parents=True)
         config.to_filename(log_dir / 'fmriprep.toml')
 
@@ -144,7 +143,7 @@ def init_single_subject_wf(subject_id):
 
     """
     from niworkflows.engine.workflows import LiterateWorkflow as Workflow
-    from niworkflows.interfaces.bids import BIDSInfo, BIDSDataGrabber
+    from niworkflows.interfaces.bids import BIDSDataGrabber, BIDSInfo
     from niworkflows.interfaces.nilearn import NILEARN_VERSION
     from niworkflows.utils.bids import collect_data
     from niworkflows.utils.misc import fix_multi_T1w_source_name
@@ -157,7 +156,8 @@ def init_single_subject_wf(subject_id):
         subject_id,
         task=config.execution.task_id,
         echo=config.execution.echo_idx,
-        bids_filters=config.execution.bids_filters)[0]
+        bids_filters=config.execution.bids_filters,
+    )[0]
 
     if 'flair' in config.workflow.ignore:
         subject_data['flair'] = []
@@ -173,11 +173,13 @@ def init_single_subject_wf(subject_id):
         raise RuntimeError(
             "No BOLD images found for participant {} and task {}. "
             "All workflows require BOLD images.".format(
-                subject_id, task_id if task_id else '')
+                subject_id, task_id if task_id else ''
+            )
         )
 
     if anat_derivatives:
         from smriprep.utils.bids import collect_derivatives
+
         std_spaces = spaces.get_spaces(nonstandard=False, dim=(3,))
         anat_derivatives = collect_derivatives(
             anat_derivatives.absolute(),
@@ -186,15 +188,19 @@ def init_single_subject_wf(subject_id):
             config.workflow.run_reconall,
         )
         if anat_derivatives is None:
-            config.loggers.workflow.warning(f"""\
+            config.loggers.workflow.warning(
+                f"""\
 Attempted to access pre-existing anatomical derivatives at \
 <{config.execution.anat_derivatives}>, however not all expectations of fMRIPrep \
 were met (for participant <{subject_id}>, spaces <{', '.join(std_spaces)}>, \
-reconall <{config.workflow.run_reconall}>).""")
+reconall <{config.workflow.run_reconall}>)."""
+            )
 
     if not anat_derivatives and not subject_data['t1w']:
-        raise Exception("No T1w images found for participant {}. "
-                        "All workflows require T1w images.".format(subject_id))
+        raise Exception(
+            "No T1w images found for participant {}. "
+            "All workflows require T1w images.".format(subject_id)
+        )
 
     workflow = Workflow(name=name)
     workflow.__desc__ = """
@@ -204,8 +210,9 @@ def init_single_subject_wf(subject_id):
 which is based on *Nipype* {nipype_ver}
 (@nipype1; @nipype2; RRID:SCR_002502).
 
-""".format(fmriprep_ver=config.environment.version,
-           nipype_ver=config.environment.nipype_version)
+""".format(
+        fmriprep_ver=config.environment.version, nipype_ver=config.environment.nipype_version
+    )
     workflow.__postdesc__ = """
 
 Many internal operations of *fMRIPrep* use
@@ -227,39 +234,64 @@ def init_single_subject_wf(subject_id):
 
 ### References
 
-""".format(nilearn_ver=NILEARN_VERSION)
+""".format(
+        nilearn_ver=NILEARN_VERSION
+    )
 
     fmriprep_dir = str(config.execution.fmriprep_dir)
 
-    inputnode = pe.Node(niu.IdentityInterface(fields=['subjects_dir']),
-                        name='inputnode')
+    inputnode = pe.Node(niu.IdentityInterface(fields=['subjects_dir']), name='inputnode')
 
-    bidssrc = pe.Node(BIDSDataGrabber(subject_data=subject_data,
-                                      anat_only=anat_only,
-                                      anat_derivatives=anat_derivatives,
-                                      subject_id=subject_id),
-                      name='bidssrc')
+    bidssrc = pe.Node(
+        BIDSDataGrabber(
+            subject_data=subject_data,
+            anat_only=anat_only,
+            anat_derivatives=anat_derivatives,
+            subject_id=subject_id,
+        ),
+        name='bidssrc',
+    )
 
-    bids_info = pe.Node(BIDSInfo(
-        bids_dir=config.execution.bids_dir, bids_validate=False), name='bids_info')
+    bids_info = pe.Node(
+        BIDSInfo(bids_dir=config.execution.bids_dir, bids_validate=False), name='bids_info'
+    )
 
-    summary = pe.Node(SubjectSummary(std_spaces=spaces.get_spaces(nonstandard=False),
-                                     nstd_spaces=spaces.get_spaces(standard=False)),
-                      name='summary', run_without_submitting=True)
+    summary = pe.Node(
+        SubjectSummary(
+            std_spaces=spaces.get_spaces(nonstandard=False),
+            nstd_spaces=spaces.get_spaces(standard=False),
+        ),
+        name='summary',
+        run_without_submitting=True,
+    )
 
-    about = pe.Node(AboutSummary(version=config.environment.version,
-                                 command=' '.join(sys.argv)),
-                    name='about', run_without_submitting=True)
+    about = pe.Node(
+        AboutSummary(version=config.environment.version, command=' '.join(sys.argv)),
+        name='about',
+        run_without_submitting=True,
+    )
 
     ds_report_summary = pe.Node(
-        DerivativesDataSink(base_directory=fmriprep_dir, desc='summary', datatype="figures",
-                            dismiss_entities=("echo",)),
-        name='ds_report_summary', run_without_submitting=True)
+        DerivativesDataSink(
+            base_directory=fmriprep_dir,
+            desc='summary',
+            datatype="figures",
+            dismiss_entities=("echo",),
+        ),
+        name='ds_report_summary',
+        run_without_submitting=True,
+    )
 
     ds_report_about = pe.Node(
-        DerivativesDataSink(base_directory=fmriprep_dir, desc='about', datatype="figures",
-                            dismiss_entities=("echo",)),
-        name='ds_report_about', run_without_submitting=True)
+        DerivativesDataSink(
+            base_directory=fmriprep_dir,
+            desc='about',
+            datatype="figures",
+            dismiss_entities=("echo",),
+        ),
+        name='ds_report_about',
+        run_without_submitting=True,
+    )
 
     # Preprocessing of T1w (includes registration to MNI)
     anat_preproc_wf = init_anat_preproc_wf(
@@ -273,12 +305,11 @@ def init_single_subject_wf(subject_id):
         output_dir=fmriprep_dir,
         skull_strip_fixed_seed=config.workflow.skull_strip_fixed_seed,
         skull_strip_mode=config.workflow.skull_strip_t1w,
-        skull_strip_template=Reference.from_string(
-            config.workflow.skull_strip_template)[0],
+        skull_strip_template=Reference.from_string(config.workflow.skull_strip_template)[0],
         spaces=spaces,
         t1w=subject_data['t1w'],
     )
-
+    # fmt:off
     workflow.connect([
         (inputnode, anat_preproc_wf, [('subjects_dir', 'inputnode.subjects_dir')]),
         (inputnode, summary, [('subjects_dir', 'subjects_dir')]),
@@ -308,6 +339,7 @@ def init_single_subject_wf(subject_id):
         (anat_preproc_wf, ds_report_summary, [('outputnode.t1w_preproc', 'source_file')]),
         (anat_preproc_wf, ds_report_about, [('outputnode.t1w_preproc', 'source_file')]),
     ])
+    # fmt:on
 
     # Overwrite ``out_path_base`` of smriprep's DataSinks
     for node in workflow.list_node_names():
@@ -318,11 +350,16 @@ def init_single_subject_wf(subject_id):
         return workflow
 
     from sdcflows import fieldmaps as fm
+
     fmap_estimators = None
 
-    if any(("fieldmaps" not in config.workflow.ignore,
+    if any(
+        (
+            "fieldmaps" not in config.workflow.ignore,
             config.workflow.use_syn_sdc,
-            config.workflow.force_syn)):
+            config.workflow.force_syn,
+        )
+    ):
         from sdcflows.utils.wrangler import find_estimators
 
         # SDC Step 1: Run basic heuristics to identify available data for fieldmap estimation
@@ -335,24 +372,22 @@ def init_single_subject_wf(subject_id):
         )
 
         if config.workflow.use_syn_sdc and not fmap_estimators:
-            message = ("Fieldmap-less (SyN) estimation was requested, but "
-                       "PhaseEncodingDirection information appears to be "
-                       "absent.")
+            message = (
+                "Fieldmap-less (SyN) estimation was requested, but PhaseEncodingDirection "
+                "information appears to be absent."
+            )
             config.loggers.workflow.error(message)
             if config.workflow.use_syn_sdc == "error":
                 raise ValueError(message)
 
-        if (
-            "fieldmaps" in config.workflow.ignore
-            and [f for f in fmap_estimators
-                 if f.method != fm.EstimatorType.ANAT]
+        if "fieldmaps" in config.workflow.ignore and any(
+            f.method == fm.EstimatorType.ANAT for f in fmap_estimators
         ):
             config.loggers.workflow.info(
                 'Option "--ignore fieldmaps" was set, but either "--use-syn-sdc" '
                 'or "--force-syn" were given, so fieldmap-less estimation will be executed.'
             )
-            fmap_estimators = [f for f in fmap_estimators
-                               if f.method == fm.EstimatorType.ANAT]
+            fmap_estimators = [f for f in fmap_estimators if f.method == fm.EstimatorType.ANAT]
 
     if fmap_estimators:
         config.loggers.workflow.info(
@@ -368,7 +403,9 @@ def init_single_subject_wf(subject_id):
 : For each of the {num_bold} BOLD runs found per subject (across all
 tasks and sessions), the following preprocessing was performed.
-""".format(num_bold=len(subject_data['bold']))
+""".format(
+        num_bold=len(subject_data['bold'])
+    )
 
     func_preproc_wfs = []
     has_fieldmap = bool(fmap_estimators)
@@ -378,23 +415,25 @@ def init_single_subject_wf(subject_id):
             continue
         func_preproc_wf.__desc__ = func_pre_desc + (func_preproc_wf.__desc__ or "")
+        # fmt:off
         workflow.connect([
-            (anat_preproc_wf, func_preproc_wf,
-             [('outputnode.t1w_preproc', 'inputnode.t1w_preproc'),
-              ('outputnode.t1w_mask', 'inputnode.t1w_mask'),
-              ('outputnode.t1w_dseg', 'inputnode.t1w_dseg'),
-              ('outputnode.t1w_aseg', 'inputnode.t1w_aseg'),
-              ('outputnode.t1w_aparc', 'inputnode.t1w_aparc'),
-              ('outputnode.t1w_tpms', 'inputnode.t1w_tpms'),
-              ('outputnode.template', 'inputnode.template'),
-              ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'),
-              ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'),
-              # Undefined if --fs-no-reconall, but this is safe
-              ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
-              ('outputnode.subject_id', 'inputnode.subject_id'),
-              ('outputnode.t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'),
-              ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]),
+            (anat_preproc_wf, func_preproc_wf, [
+                ('outputnode.t1w_preproc', 'inputnode.t1w_preproc'),
+                ('outputnode.t1w_mask', 'inputnode.t1w_mask'),
+                ('outputnode.t1w_dseg', 'inputnode.t1w_dseg'),
+                ('outputnode.t1w_aseg', 'inputnode.t1w_aseg'),
+                ('outputnode.t1w_aparc', 'inputnode.t1w_aparc'),
+                ('outputnode.t1w_tpms', 'inputnode.t1w_tpms'),
+                ('outputnode.template', 'inputnode.template'),
+                ('outputnode.anat2std_xfm', 'inputnode.anat2std_xfm'),
+                ('outputnode.std2anat_xfm', 'inputnode.std2anat_xfm'),
+                # Undefined if --fs-no-reconall, but this is safe
+                ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
+                ('outputnode.subject_id', 'inputnode.subject_id'),
+                ('outputnode.t1w2fsnative_xfm', 'inputnode.t1w2fsnative_xfm'),
+                ('outputnode.fsnative2t1w_xfm', 'inputnode.fsnative2t1w_xfm')]),
         ])
+        # fmt:on
         func_preproc_wfs.append(func_preproc_wf)
 
     if not has_fieldmap:
@@ -417,7 +456,7 @@ def init_single_subject_wf(subject_id):
 BIDS structure for this particular subject.
 """
     for func_preproc_wf in func_preproc_wfs:
-        # fmt: off
+        # fmt:off
         workflow.connect([
             (fmap_wf, func_preproc_wf, [
                 ("outputnode.fmap", "inputnode.fmap"),
                 ("outputnode.fmap_ref", "inputnode.fmap_ref"),
                 ("outputnode.fmap_coeff", "inputnode.fmap_coeff"),
                 ("outputnode.fmap_mask", "inputnode.fmap_mask"),
                 ("outputnode.fmap_id", "inputnode.fmap_id"),
                 ("outputnode.method", "inputnode.sdc_method"),
             ]),
         ])
-        # fmt: on
+        # fmt:on
 
     # Overwrite ``out_path_base`` of sdcflows's DataSinks
     for node in fmap_wf.list_node_names():
@@ -440,6 +479,7 @@ def init_single_subject_wf(subject_id):
     # Select "MNI152NLin2009cAsym" from standard references.
     # This node may be used by multiple ANAT estimators, so define outside loop.
     from niworkflows.interfaces.utility import KeySelect
+
     fmap_select_std = pe.Node(
         KeySelect(fields=["std2anat_xfm"], key="MNI152NLin2009cAsym"),
         name="fmap_select_std",
@@ -455,9 +495,11 @@ def init_single_subject_wf(subject_id):
     # fmt:on
 
     for estimator in fmap_estimators:
-        config.loggers.workflow.info(f"""\
+        config.loggers.workflow.info(
+            f"""\
 Setting-up fieldmap "{estimator.bids_id}" ({estimator.method}) with \
-<{', '.join(s.path.name for s in estimator.sources)}>""")
+<{', '.join(s.path.name for s in estimator.sources)}>"""
+        )
 
         # Mapped and phasediff can be connected internally by SDCFlows
         if estimator.method in (fm.EstimatorType.MAPPED, fm.EstimatorType.PHASEDIFF):
@@ -476,9 +518,7 @@ def init_single_subject_wf(subject_id):
                 flatten = fmap_wf.get_node(f"wf_{estimator.bids_id}.flatten")
                 flatten.inputs.max_trs = config.workflow.topup_max_vols
             else:
-                raise NotImplementedError(
-                    "Sophisticated PEPOLAR schemes are unsupported."
-                )
+                raise NotImplementedError("Sophisticated PEPOLAR schemes are unsupported.")
 
         elif estimator.method == fm.EstimatorType.ANAT:
             from sdcflows.workflows.fit.syn import init_syn_preprocessing_wf
diff --git a/fmriprep/workflows/bold/__init__.py b/fmriprep/workflows/bold/__init__.py
index bee4ede42..473ac0f64 100644
--- a/fmriprep/workflows/bold/__init__.py
+++ b/fmriprep/workflows/bold/__init__.py
@@ -17,23 +17,16 @@
 
 """
 from .base import init_func_preproc_wf
+from .confounds import init_bold_confs_wf, init_ica_aroma_wf
 from .hmc import init_bold_hmc_wf
-from .stc import init_bold_stc_wf
-from .t2s import init_bold_t2s_wf
-from .registration import (
-    init_bold_t1_trans_wf,
-    init_bold_reg_wf,
-)
+from .registration import init_bold_reg_wf, init_bold_t1_trans_wf
 from .resampling import (
+    init_bold_preproc_trans_wf,
     init_bold_std_trans_wf,
     init_bold_surf_wf,
-    init_bold_preproc_trans_wf,
-)
-
-from .confounds import (
-    init_bold_confs_wf,
-    init_ica_aroma_wf,
 )
+from .stc import init_bold_stc_wf
+from .t2s import init_bold_t2s_wf
 
 __all__ = [
     'init_bold_confs_wf',
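The `--ignore fieldmaps` branch in `workflows/base.py` above keeps only fieldmap-less (ANAT/SyN) estimators when `--use-syn-sdc` or `--force-syn` was also given. A toy rendering of that filter, with a stub enum standing in for `sdcflows.fieldmaps.EstimatorType` and plain enum values standing in for estimator objects:

from enum import Enum, auto


class EstimatorType(Enum):  # stub for sdcflows.fieldmaps.EstimatorType
    PEPOLAR = auto()
    PHASEDIFF = auto()
    ANAT = auto()


methods = [EstimatorType.PEPOLAR, EstimatorType.ANAT]

# "--ignore fieldmaps" plus a SyN request: drop everything except ANAT estimators
if any(m == EstimatorType.ANAT for m in methods):
    methods = [m for m in methods if m == EstimatorType.ANAT]

assert methods == [EstimatorType.ANAT]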
diff --git a/fmriprep/workflows/bold/base.py b/fmriprep/workflows/bold/base.py
index 5824a5ad2..7b7531e45 100644
--- a/fmriprep/workflows/bold/base.py
+++ b/fmriprep/workflows/bold/base.py
@@ -28,35 +28,31 @@
 .. autofunction:: init_func_derivatives_wf
 
 """
-from ... import config
-
 import os
 
 import nibabel as nb
+from nipype.interfaces import utility as niu
 from nipype.interfaces.fsl import Split as FSLSplit
 from nipype.pipeline import engine as pe
-from nipype.interfaces import utility as niu
-
-from niworkflows.utils.connections import pop_file, listify
-
-
-from ...utils.meepi import combine_meepi_source
+from niworkflows.utils.connections import listify, pop_file
 
+from ... import config
 from ...interfaces import DerivativesDataSink
 from ...interfaces.reports import FunctionalSummary
+from ...utils.meepi import combine_meepi_source
 
 # BOLD workflows
 from .confounds import init_bold_confs_wf, init_carpetplot_wf
 from .hmc import init_bold_hmc_wf
-from .stc import init_bold_stc_wf
-from .t2s import init_bold_t2s_wf, init_t2s_reporting_wf
-from .registration import init_bold_t1_trans_wf, init_bold_reg_wf
+from .outputs import init_func_derivatives_wf
+from .registration import init_bold_reg_wf, init_bold_t1_trans_wf
 from .resampling import (
-    init_bold_surf_wf,
-    init_bold_std_trans_wf,
     init_bold_preproc_trans_wf,
+    init_bold_std_trans_wf,
+    init_bold_surf_wf,
 )
-from .outputs import init_func_derivatives_wf
+from .stc import init_bold_stc_wf
+from .t2s import init_bold_t2s_wf, init_t2s_reporting_wf
 
 
 def init_func_preproc_wf(bold_file, has_fieldmap=False):
@@ -201,14 +197,13 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
     from niworkflows.engine.workflows import LiterateWorkflow as Workflow
     from niworkflows.func.util import init_bold_reference_wf
     from niworkflows.interfaces.nibabel import ApplyMask
-    from niworkflows.interfaces.utility import KeySelect, DictMerge
     from niworkflows.interfaces.reportlets.registration import (
         SimpleBeforeAfterRPT as SimpleBeforeAfter,
     )
+    from niworkflows.interfaces.utility import DictMerge, KeySelect
 
-    if nb.load(
-        bold_file[0] if isinstance(bold_file, (list, tuple)) else bold_file
-    ).shape[3:] <= (5 - config.execution.sloppy,):
+    nvols = nb.load(bold_file[0] if isinstance(bold_file, (list, tuple)) else bold_file).shape[3]
+    if nvols <= 5 - config.execution.sloppy:
         config.loggers.workflow.warning(
             f"Too short BOLD series (<= 5 timepoints). Skipping processing of <{bold_file}>."
         )
@@ -297,17 +292,16 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
         estimator_key = listify(metadata.get("B0FieldSource"))
 
         if not estimator_key:
-            from pathlib import Path
             import re
+            from pathlib import Path
+
             from sdcflows.fieldmaps import get_identifier
 
             # Fallback to IntendedFor
             intended_rel = re.sub(
                 r"^sub-[a-zA-Z0-9]*/",
                 "",
-                str(Path(
-                    bold_file if not multiecho else bold_file[0]
-                ).relative_to(layout.root))
+                str(Path(bold_file if not multiecho else bold_file[0]).relative_to(layout.root)),
             )
             estimator_key = get_identifier(intended_rel)
 
@@ -319,13 +313,11 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
         else:
             config.loggers.workflow.info(
                 f"Found usable B0-map (fieldmap) estimator(s) <{', '.join(estimator_key)}> "
-                f"to correct <{bold_file}> for susceptibility-derived distortions.")
+                f"to correct <{bold_file}> for susceptibility-derived distortions."
+            )
 
     # Check whether STC must/can be run
-    run_stc = (
-        bool(metadata.get("SliceTiming"))
-        and "slicetiming" not in config.workflow.ignore
-    )
+    run_stc = bool(metadata.get("SliceTiming")) and "slicetiming" not in config.workflow.ignore
 
     # Build workflow
     workflow = Workflow(name=wf_name)
@@ -501,9 +493,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
     select_bold = pe.Node(niu.Select(), name="select_bold")
 
     # Top-level BOLD splitter
-    bold_split = pe.Node(
-        FSLSplit(dimension="t"), name="bold_split", mem_gb=mem_gb["filesize"] * 3
-    )
+    bold_split = pe.Node(FSLSplit(dimension="t"), name="bold_split", mem_gb=mem_gb["filesize"] * 3)
 
     # HMC on the BOLD
     bold_hmc_wf = init_bold_hmc_wf(
@@ -605,7 +595,7 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
 
     bold_final = pe.Node(
         niu.IdentityInterface(fields=["bold", "boldref", "mask", "bold_echos", "t2star"]),
-        name="bold_final"
+        name="bold_final",
     )
 
     # Generate a final BOLD reference
@@ -1045,18 +1035,14 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
     # REPORTING ############################################################
     ds_report_summary = pe.Node(
-        DerivativesDataSink(
-            desc="summary", datatype="figures", dismiss_entities=("echo",)
-        ),
+        DerivativesDataSink(desc="summary", datatype="figures", dismiss_entities=("echo",)),
         name="ds_report_summary",
         run_without_submitting=True,
         mem_gb=config.DEFAULT_MEMORY_MIN_GB,
     )
 
     ds_report_validation = pe.Node(
-        DerivativesDataSink(
-            desc="validation", datatype="figures", dismiss_entities=("echo",)
-        ),
+        DerivativesDataSink(desc="validation", datatype="figures", dismiss_entities=("echo",)),
         name="ds_report_validation",
         run_without_submitting=True,
         mem_gb=config.DEFAULT_MEMORY_MIN_GB,
     )
@@ -1119,8 +1105,8 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False):
         return workflow
 
     from niworkflows.interfaces.utility import KeySelect
-    from sdcflows.workflows.apply.registration import init_coeff2epi_wf
     from sdcflows.workflows.apply.correction import init_unwarp_wf
+    from sdcflows.workflows.apply.registration import init_coeff2epi_wf
 
     coeff2epi_wf = init_coeff2epi_wf(
         debug="fieldmaps" in config.execution.debug,
@@ -1271,7 +1257,7 @@ def _dpop(list_of_lists):
 
 
 def _create_mem_gb(bold_fname):
-    bold_size_gb = os.path.getsize(bold_fname) / (1024 ** 3)
+    bold_size_gb = os.path.getsize(bold_fname) / (1024**3)
     bold_tlen = nb.load(bold_fname).shape[-1]
     mem_gb = {
         "filesize": bold_size_gb,
@@ -1330,13 +1316,12 @@ def extract_entities(file_list):
 
     """
     from collections import defaultdict
+
    from bids.layout import parse_file_entities
 
     entities = defaultdict(list)
     for e, v in [
-        ev_pair
-        for f in listify(file_list)
-        for ev_pair in parse_file_entities(f).items()
+        ev_pair for f in listify(file_list) for ev_pair in parse_file_entities(f).items()
     ]:
         entities[e].append(v)
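Usage sketch for `extract_entities` with hypothetical filenames; the final collapsing of single-valued lists to scalars happens outside the hunk shown, so only the accumulation step is reproduced here:

from collections import defaultdict

from bids.layout import parse_file_entities

files = [
    "sub-01_task-rest_echo-1_bold.nii.gz",  # hypothetical inputs
    "sub-01_task-rest_echo-2_bold.nii.gz",
]
entities = defaultdict(list)
for f in files:
    for key, val in parse_file_entities(f).items():
        entities[key].append(val)
# "subject" repeats across both files, while "echo" accumulates both values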
diff --git a/fmriprep/workflows/bold/confounds.py b/fmriprep/workflows/bold/confounds.py
index 30fd31ad5..651ef1cb4 100644
--- a/fmriprep/workflows/bold/confounds.py
+++ b/fmriprep/workflows/bold/confounds.py
@@ -31,19 +31,21 @@
 from os import getenv
 
 from nipype.algorithms import confounds as nac
-from nipype.interfaces import utility as niu, fsl
+from nipype.interfaces import fsl
+from nipype.interfaces import utility as niu
 from nipype.pipeline import engine as pe
 from templateflow.api import get as get_template
 
 from fmriprep import config
+
 from ...config import DEFAULT_MEMORY_MIN_GB
 from ...interfaces import DerivativesDataSink
 from ...interfaces.confounds import (
+    FilterDropped,
+    FMRISummary,
     GatherConfounds,
     ICAConfounds,
-    FMRISummary,
     RenameACompCor,
-    FilterDropped,
 )
@@ -154,15 +156,17 @@ def init_bold_confs_wf(
     from niworkflows.interfaces.confounds import ExpandModel, SpikeRegressors
     from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms
     from niworkflows.interfaces.images import SignalExtraction
-    from niworkflows.interfaces.reportlets.masks import ROIsPlot
     from niworkflows.interfaces.morphology import BinaryDilation, BinarySubtraction
     from niworkflows.interfaces.nibabel import ApplyMask, Binarize
-    from niworkflows.interfaces.patches import (
-        RobustACompCor as ACompCor,
-        RobustTCompCor as TCompCor,
+    from niworkflows.interfaces.patches import RobustACompCor as ACompCor
+    from niworkflows.interfaces.patches import RobustTCompCor as TCompCor
+    from niworkflows.interfaces.plotting import (
+        CompCorVariancePlot,
+        ConfoundsCorrelationPlot,
     )
-    from niworkflows.interfaces.plotting import CompCorVariancePlot, ConfoundsCorrelationPlot
-    from niworkflows.interfaces.utility import AddTSVHeader, TSV2JSON, DictMerge
+    from niworkflows.interfaces.reportlets.masks import ROIsPlot
+    from niworkflows.interfaces.utility import TSV2JSON, AddTSVHeader, DictMerge
+
     from ...interfaces.confounds import aCompCorMasks
 
     gm_desc = (
@@ -250,10 +254,7 @@ def init_bold_confs_wf(
         ApplyTransforms(interpolation="MultiLabel"),
         name="t1w_mask_tfm",
     )
-    union_mask = pe.Node(
-        niu.Function(function=_binary_union),
-        name="union_mask"
-    )
+    union_mask = pe.Node(niu.Function(function=_binary_union), name="union_mask")
 
     # Create the crown mask
     dilated_mask = pe.Node(BinaryDilation(), name="dilated_mask")
@@ -839,7 +840,7 @@ def init_ica_aroma_wf(
     """
     from niworkflows.engine.workflows import LiterateWorkflow as Workflow
     from niworkflows.interfaces.reportlets.segmentation import ICA_AROMARPT
-    from niworkflows.interfaces.utility import KeySelect, TSV2JSON
+    from niworkflows.interfaces.utility import TSV2JSON, KeySelect
 
     workflow = Workflow(name=name)
     workflow.__postdesc__ = """\
@@ -1055,8 +1056,9 @@ def _add_volumes(bold_file, bold_cut_file, skip_vols):
 def _binary_union(mask1, mask2):
     """Generate the union of two masks."""
     from pathlib import Path
-    import numpy as np
+
     import nibabel as nb
+    import numpy as np
 
     img = nb.load(mask1)
     mskarr1 = np.asanyarray(img.dataobj, dtype=int) > 0
@@ -1071,16 +1073,17 @@ def _binary_union(mask1, mask2):
 def _carpet_parcellation(segmentation, crown_mask, nifti=False):
     """Generate the union of two masks."""
     from pathlib import Path
-    import numpy as np
+
     import nibabel as nb
+    import numpy as np
 
     img = nb.load(segmentation)
 
     lut = np.zeros((256,), dtype="uint8")
     lut[100:201] = 1 if nifti else 0  # Ctx GM
-    lut[30:99] = 2 if nifti else 0 # dGM
-    lut[1:11] = 3 if nifti else 1 # WM+CSF
-    lut[255] = 4 if nifti else 0 # Cerebellum
+    lut[30:99] = 2 if nifti else 0  # dGM
+    lut[1:11] = 3 if nifti else 1  # WM+CSF
+    lut[255] = 4 if nifti else 0  # Cerebellum
     # Apply lookup table
     seg = lut[np.asanyarray(img.dataobj, dtype="uint16")]
     seg[np.asanyarray(nb.load(crown_mask).dataobj, dtype=int) > 0] = 5 if nifti else 2
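The trick behind `_carpet_parcellation` is that indexing an integer image into `lut` applies the lookup table to every voxel at once. A self-contained demonstration using the `nifti=True` values from the hunk above:

import numpy as np

lut = np.zeros((256,), dtype="uint8")
lut[100:201] = 1  # Ctx GM
lut[30:99] = 2    # dGM
lut[1:11] = 3     # WM+CSF
lut[255] = 4      # Cerebellum

seg = np.array([0, 5, 42, 120, 255], dtype="uint16")
assert lut[seg].tolist() == [0, 3, 2, 1, 4]  # relabeled in one vectorized step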
diff --git a/fmriprep/workflows/bold/hmc.py b/fmriprep/workflows/bold/hmc.py
index c538c7b6d..8dda7d5f8 100644
--- a/fmriprep/workflows/bold/hmc.py
+++ b/fmriprep/workflows/bold/hmc.py
@@ -28,8 +28,9 @@
 
 """
 
+from nipype.interfaces import fsl
+from nipype.interfaces import utility as niu
 from nipype.pipeline import engine as pe
-from nipype.interfaces import utility as niu, fsl
 
 from ...config import DEFAULT_MEMORY_MIN_GB
 
@@ -88,31 +89,34 @@ def init_bold_hmc_wf(mem_gb, omp_nthreads, name='bold_hmc_wf'):
 (transformation matrices, and six corresponding rotation and translation
 parameters) are estimated before any spatiotemporal filtering using
 `mcflirt` [FSL {fsl_ver}, @mcflirt].
-""".format(fsl_ver=fsl.Info().version() or '')
+""".format(
+        fsl_ver=fsl.Info().version() or ''
+    )
 
     inputnode = pe.Node(
-        niu.IdentityInterface(fields=['bold_file', 'raw_ref_image']),
-        name='inputnode')
+        niu.IdentityInterface(fields=['bold_file', 'raw_ref_image']), name='inputnode'
+    )
     outputnode = pe.Node(
-        niu.IdentityInterface(
-            fields=['xforms', 'movpar_file', 'rmsd_file']),
-        name='outputnode')
+        niu.IdentityInterface(fields=['xforms', 'movpar_file', 'rmsd_file']), name='outputnode'
+    )
 
     # Head motion correction (hmc)
     mcflirt = pe.Node(
         fsl.MCFLIRT(save_mats=True, save_plots=True, save_rms=True),
-        name='mcflirt', mem_gb=mem_gb * 3)
+        name='mcflirt',
+        mem_gb=mem_gb * 3,
+    )
 
-    fsl2itk = pe.Node(MCFLIRT2ITK(), name='fsl2itk',
-                      mem_gb=0.05, n_procs=omp_nthreads)
+    fsl2itk = pe.Node(MCFLIRT2ITK(), name='fsl2itk', mem_gb=0.05, n_procs=omp_nthreads)
 
-    normalize_motion = pe.Node(NormalizeMotionParams(format='FSL'),
-                               name="normalize_motion",
-                               mem_gb=DEFAULT_MEMORY_MIN_GB)
+    normalize_motion = pe.Node(
+        NormalizeMotionParams(format='FSL'), name="normalize_motion", mem_gb=DEFAULT_MEMORY_MIN_GB
+    )
 
     def _pick_rel(rms_files):
         return rms_files[-1]
 
+    # fmt:off
     workflow.connect([
         (inputnode, mcflirt, [('raw_ref_image', 'ref_file'),
                               ('bold_file', 'in_file')]),
@@ -124,5 +128,6 @@ def _pick_rel(rms_files):
         (fsl2itk, outputnode, [('out_file', 'xforms')]),
         (normalize_motion, outputnode, [('out_file', 'movpar_file')]),
     ])
+    # fmt:on
 
     return workflow
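The `# fmt:off` / `# fmt:on` pairs added around every `workflow.connect` call in this changeset rely on black skipping whatever sits between the two markers; a minimal self-contained illustration:

# fmt:off
ident = [
    [1, 0, 0],
    [0, 1, 0],
    [0, 0, 1],
]
# fmt:on
# black leaves the hand-aligned rows above exactly as written, while code
# outside the markers is reformatted to its canonical style.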
diff --git a/fmriprep/workflows/bold/outputs.py b/fmriprep/workflows/bold/outputs.py
index b40891352..8e134d29c 100644
--- a/fmriprep/workflows/bold/outputs.py
+++ b/fmriprep/workflows/bold/outputs.py
@@ -22,8 +22,8 @@
 #
 """Writing out derivative files."""
 import numpy as np
-from nipype.pipeline import engine as pe
 from nipype.interfaces import utility as niu
+from nipype.pipeline import engine as pe
 
 from fmriprep import config
 from fmriprep.config import DEFAULT_MEMORY_MIN_GB
@@ -31,7 +31,7 @@
 
 def prepare_timing_parameters(metadata):
-    """ Convert initial timing metadata to post-realignment timing metadata
+    """Convert initial timing metadata to post-realignment timing metadata
 
     In particular, SliceTiming metadata is invalid once STC or any realignment is applied,
     as a matrix of voxels no longer corresponds to an acquisition slice.
@@ -84,9 +84,15 @@ def prepare_timing_parameters(metadata):
     """
     timing_parameters = {
         key: metadata[key]
-        for key in ("RepetitionTime", "VolumeTiming", "DelayTime",
-                    "AcquisitionDuration", "SliceTiming")
-        if key in metadata}
+        for key in (
+            "RepetitionTime",
+            "VolumeTiming",
+            "DelayTime",
+            "AcquisitionDuration",
+            "SliceTiming",
+        )
+        if key in metadata
+    }
 
     run_stc = "SliceTiming" in metadata and 'slicetiming' not in config.workflow.ignore
     timing_parameters["SliceTimingCorrected"] = run_stc
@@ -175,42 +181,91 @@ def init_func_derivatives_wf(
         'EstimationAlgorithm': 'monoexponential decay model',
     }
 
-    inputnode = pe.Node(niu.IdentityInterface(fields=[
-        'aroma_noise_ics', 'bold_aparc_std', 'bold_aparc_t1', 'bold_aseg_std',
-        'bold_aseg_t1', 'bold_cifti', 'bold_mask_std', 'bold_mask_t1', 'bold_std',
-        'bold_std_ref', 'bold_t1', 'bold_t1_ref', 'bold_native', 'bold_native_ref',
-        'bold_mask_native', 'bold_echos_native',
-        'cifti_variant', 'cifti_metadata', 'cifti_density',
-        'confounds', 'confounds_metadata', 'melodic_mix', 'nonaggr_denoised_file',
-        'source_file', 'all_source_files',
-        'surf_files', 'surf_refs', 'template', 'spatial_reference',
-        't2star_bold', 't2star_t1', 't2star_std',
-        'bold2anat_xfm', 'anat2bold_xfm', 'acompcor_masks', 'tcompcor_mask']),
-        name='inputnode')
+    inputnode = pe.Node(
+        niu.IdentityInterface(
+            fields=[
+                'aroma_noise_ics',
+                'bold_aparc_std',
+                'bold_aparc_t1',
+                'bold_aseg_std',
+                'bold_aseg_t1',
+                'bold_cifti',
+                'bold_mask_std',
+                'bold_mask_t1',
+                'bold_std',
+                'bold_std_ref',
+                'bold_t1',
+                'bold_t1_ref',
+                'bold_native',
+                'bold_native_ref',
+                'bold_mask_native',
+                'bold_echos_native',
+                'cifti_variant',
+                'cifti_metadata',
+                'cifti_density',
+                'confounds',
+                'confounds_metadata',
+                'melodic_mix',
+                'nonaggr_denoised_file',
+                'source_file',
+                'all_source_files',
+                'surf_files',
+                'surf_refs',
+                'template',
+                'spatial_reference',
+                't2star_bold',
+                't2star_t1',
+                't2star_std',
+                'bold2anat_xfm',
+                'anat2bold_xfm',
+                'acompcor_masks',
+                'tcompcor_mask',
+            ]
+        ),
+        name='inputnode',
+    )
 
     raw_sources = pe.Node(niu.Function(function=_bids_relative), name='raw_sources')
     raw_sources.inputs.bids_root = bids_root
 
-    ds_confounds = pe.Node(DerivativesDataSink(
-        base_directory=output_dir, desc='confounds', suffix='timeseries',
-        dismiss_entities=("echo",)),
-        name="ds_confounds", run_without_submitting=True,
-        mem_gb=DEFAULT_MEMORY_MIN_GB)
+    ds_confounds = pe.Node(
+        DerivativesDataSink(
+            base_directory=output_dir,
+            desc='confounds',
+            suffix='timeseries',
+            dismiss_entities=("echo",),
+        ),
+        name="ds_confounds",
+        run_without_submitting=True,
+        mem_gb=DEFAULT_MEMORY_MIN_GB,
+    )
     ds_ref_t1w_xfm = pe.Node(
-        DerivativesDataSink(base_directory=output_dir, to='T1w',
-                            mode='image', suffix='xfm',
-                            extension='.txt',
-                            dismiss_entities=('echo',),
-                            **{'from': 'scanner'}),
-        name='ds_ref_t1w_xfm', run_without_submitting=True)
+        DerivativesDataSink(
+            base_directory=output_dir,
+            to='T1w',
+            mode='image',
+            suffix='xfm',
+            extension='.txt',
+            dismiss_entities=('echo',),
+            **{'from': 'scanner'},
+        ),
+        name='ds_ref_t1w_xfm',
+        run_without_submitting=True,
+    )
     ds_ref_t1w_inv_xfm = pe.Node(
-        DerivativesDataSink(base_directory=output_dir, to='scanner',
-                            mode='image', suffix='xfm',
-                            extension='.txt',
-                            dismiss_entities=('echo',),
-                            **{'from': 'T1w'}),
-        name='ds_t1w_tpl_inv_xfm', run_without_submitting=True)
-
+        DerivativesDataSink(
+            base_directory=output_dir,
+            to='scanner',
+            mode='image',
+            suffix='xfm',
+            extension='.txt',
+            dismiss_entities=('echo',),
+            **{'from': 'T1w'},
+        ),
+        name='ds_t1w_tpl_inv_xfm',
+        run_without_submitting=True,
+    )
 
+    # fmt:off
     workflow.connect([
         (inputnode, raw_sources, [('all_source_files', 'in_files')]),
         (inputnode, ds_confounds, [('source_file', 'source_file'),
@@ -221,25 +276,46 @@ def init_func_derivatives_wf(
         (inputnode, ds_ref_t1w_inv_xfm, [('source_file', 'source_file'),
                                          ('anat2bold_xfm', 'in_file')]),
     ])
+    # fmt:on
 
     if nonstd_spaces.intersection(('func', 'run', 'bold', 'boldref', 'sbref')):
         ds_bold_native = pe.Node(
             DerivativesDataSink(
-                base_directory=output_dir, desc='preproc', compress=True, SkullStripped=masked,
-                TaskName=metadata.get('TaskName'), **timing_parameters),
-            name='ds_bold_native', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
+                base_directory=output_dir,
+                desc='preproc',
+                compress=True,
+                SkullStripped=masked,
+                TaskName=metadata.get('TaskName'),
+                **timing_parameters,
+            ),
+            name='ds_bold_native',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
         ds_bold_native_ref = pe.Node(
-            DerivativesDataSink(base_directory=output_dir, suffix='boldref', compress=True,
-                                dismiss_entities=("echo",)),
-            name='ds_bold_native_ref', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
+            DerivativesDataSink(
+                base_directory=output_dir,
+                suffix='boldref',
+                compress=True,
+                dismiss_entities=("echo",),
+            ),
+            name='ds_bold_native_ref',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
         ds_bold_mask_native = pe.Node(
-            DerivativesDataSink(base_directory=output_dir, desc='brain', suffix='mask',
-                                compress=True, dismiss_entities=("echo",)),
-            name='ds_bold_mask_native', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
-
+            DerivativesDataSink(
+                base_directory=output_dir,
+                desc='brain',
+                suffix='mask',
+                compress=True,
+                dismiss_entities=("echo",),
+            ),
+            name='ds_bold_mask_native',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
 
+        # fmt:off
         workflow.connect([
             (inputnode, ds_bold_native, [('source_file', 'source_file'),
                                          ('bold_native', 'in_file')]),
@@ -249,57 +325,98 @@ def init_func_derivatives_wf(
                                               ('bold_mask_native', 'in_file')]),
             (raw_sources, ds_bold_mask_native, [('out', 'RawSources')]),
         ])
+        # fmt:on
 
         if multiecho:
             ds_t2star_bold = pe.Node(
-                DerivativesDataSink(base_directory=output_dir, space='boldref',
-                                    suffix='T2starmap', compress=True, dismiss_entities=("echo",),
-                                    **t2star_meta),
-                name='ds_t2star_bold', run_without_submitting=True,
-                mem_gb=DEFAULT_MEMORY_MIN_GB)
-
+                DerivativesDataSink(
+                    base_directory=output_dir,
+                    space='boldref',
+                    suffix='T2starmap',
+                    compress=True,
+                    dismiss_entities=("echo",),
+                    **t2star_meta,
+                ),
+                name='ds_t2star_bold',
+                run_without_submitting=True,
+                mem_gb=DEFAULT_MEMORY_MIN_GB,
+            )
+            # fmt:off
             workflow.connect([
                 (inputnode, ds_t2star_bold, [('source_file', 'source_file'),
                                              ('t2star_bold', 'in_file')]),
                 (raw_sources, ds_t2star_bold, [('out', 'RawSources')]),
             ])
+            # fmt:on
 
     if multiecho and config.execution.me_output_echos:
         ds_bold_echos_native = pe.MapNode(
             DerivativesDataSink(
-                base_directory=output_dir, desc='preproc', compress=True, SkullStripped=False,
-                TaskName=metadata.get('TaskName'), **timing_parameters),
+                base_directory=output_dir,
+                desc='preproc',
+                compress=True,
+                SkullStripped=False,
+                TaskName=metadata.get('TaskName'),
+                **timing_parameters,
+            ),
             iterfield=['source_file', 'in_file', 'meta_dict'],
-            name='ds_bold_echos_native', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
+            name='ds_bold_echos_native',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
         ds_bold_echos_native.inputs.meta_dict = [
             {"EchoTime": md["EchoTime"]} for md in all_metadata
         ]
-
+        # fmt:off
         workflow.connect([
             (inputnode, ds_bold_echos_native, [
                 ('all_source_files', 'source_file'),
                 ('bold_echos_native', 'in_file')]),
         ])
+        # fmt:on
 
     # Resample to T1w space
     if nonstd_spaces.intersection(('T1w', 'anat')):
         ds_bold_t1 = pe.Node(
             DerivativesDataSink(
-                base_directory=output_dir, space='T1w', desc='preproc', compress=True,
-                SkullStripped=masked, TaskName=metadata.get('TaskName'), **timing_parameters),
-            name='ds_bold_t1', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
+                base_directory=output_dir,
+                space='T1w',
+                desc='preproc',
+                compress=True,
+                SkullStripped=masked,
+                TaskName=metadata.get('TaskName'),
+                **timing_parameters,
+            ),
+            name='ds_bold_t1',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
         ds_bold_t1_ref = pe.Node(
-            DerivativesDataSink(base_directory=output_dir, space='T1w', suffix='boldref',
-                                compress=True, dismiss_entities=("echo",)),
-            name='ds_bold_t1_ref', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
+            DerivativesDataSink(
+                base_directory=output_dir,
+                space='T1w',
+                suffix='boldref',
+                compress=True,
+                dismiss_entities=("echo",),
+            ),
+            name='ds_bold_t1_ref',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
         ds_bold_mask_t1 = pe.Node(
-            DerivativesDataSink(base_directory=output_dir, space='T1w', desc='brain',
-                                suffix='mask', compress=True, dismiss_entities=("echo",)),
-            name='ds_bold_mask_t1', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
+            DerivativesDataSink(
+                base_directory=output_dir,
+                space='T1w',
+                desc='brain',
+                suffix='mask',
+                compress=True,
+                dismiss_entities=("echo",),
+            ),
+            name='ds_bold_mask_t1',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
+        # fmt:off
         workflow.connect([
             (inputnode, ds_bold_t1, [('source_file', 'source_file'),
                                      ('bold_t1', 'in_file')]),
@@ -309,54 +426,98 @@ def init_func_derivatives_wf(
                                           ('bold_mask_t1', 'in_file')]),
             (raw_sources, ds_bold_mask_t1, [('out', 'RawSources')]),
         ])
+        # fmt:on
 
         if freesurfer:
-            ds_bold_aseg_t1 = pe.Node(DerivativesDataSink(
-                base_directory=output_dir, space='T1w', desc='aseg', suffix='dseg',
-                compress=True, dismiss_entities=("echo",)),
-                name='ds_bold_aseg_t1', run_without_submitting=True,
-                mem_gb=DEFAULT_MEMORY_MIN_GB)
-            ds_bold_aparc_t1 = pe.Node(DerivativesDataSink(
-                base_directory=output_dir, space='T1w', desc='aparcaseg', suffix='dseg',
-                compress=True, dismiss_entities=("echo",)),
-                name='ds_bold_aparc_t1', run_without_submitting=True,
-                mem_gb=DEFAULT_MEMORY_MIN_GB)
+            ds_bold_aseg_t1 = pe.Node(
+                DerivativesDataSink(
+                    base_directory=output_dir,
+                    space='T1w',
+                    desc='aseg',
+                    suffix='dseg',
+                    compress=True,
+                    dismiss_entities=("echo",),
+                ),
+                name='ds_bold_aseg_t1',
+                run_without_submitting=True,
+                mem_gb=DEFAULT_MEMORY_MIN_GB,
+            )
+            ds_bold_aparc_t1 = pe.Node(
+                DerivativesDataSink(
+                    base_directory=output_dir,
+                    space='T1w',
+                    desc='aparcaseg',
+                    suffix='dseg',
+                    compress=True,
+                    dismiss_entities=("echo",),
+                ),
+                name='ds_bold_aparc_t1',
+                run_without_submitting=True,
+                mem_gb=DEFAULT_MEMORY_MIN_GB,
+            )
+            # fmt:off
             workflow.connect([
                 (inputnode, ds_bold_aseg_t1, [('source_file', 'source_file'),
                                               ('bold_aseg_t1', 'in_file')]),
                 (inputnode, ds_bold_aparc_t1, [('source_file', 'source_file'),
                                                ('bold_aparc_t1', 'in_file')]),
            ])
+            # fmt:on
 
         if multiecho:
             ds_t2star_t1 = pe.Node(
-                DerivativesDataSink(base_directory=output_dir, space='T1w',
-                                    suffix='T2starmap', compress=True, dismiss_entities=("echo",),
-                                    **t2star_meta),
-                name='ds_t2star_t1', run_without_submitting=True,
-                mem_gb=DEFAULT_MEMORY_MIN_GB)
-
+                DerivativesDataSink(
+                    base_directory=output_dir,
+                    space='T1w',
+                    suffix='T2starmap',
+                    compress=True,
+                    dismiss_entities=("echo",),
+                    **t2star_meta,
+                ),
+                name='ds_t2star_t1',
+                run_without_submitting=True,
+                mem_gb=DEFAULT_MEMORY_MIN_GB,
+            )
+            # fmt:off
             workflow.connect([
                 (inputnode, ds_t2star_t1, [('source_file', 'source_file'),
                                            ('t2star_t1', 'in_file')]),
                 (raw_sources, ds_t2star_t1, [('out', 'RawSources')]),
            ])
+            # fmt:on
 
     if use_aroma:
-        ds_aroma_noise_ics = pe.Node(DerivativesDataSink(
-            base_directory=output_dir, suffix='AROMAnoiseICs', dismiss_entities=("echo",)),
-            name="ds_aroma_noise_ics", run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
-        ds_melodic_mix = pe.Node(DerivativesDataSink(
-            base_directory=output_dir, desc='MELODIC', suffix='mixing',
-            dismiss_entities=("echo",)),
-            name="ds_melodic_mix", run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
+        ds_aroma_noise_ics = pe.Node(
+            DerivativesDataSink(
+                base_directory=output_dir, suffix='AROMAnoiseICs', dismiss_entities=("echo",)
+            ),
+            name="ds_aroma_noise_ics",
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
+        ds_melodic_mix = pe.Node(
+            DerivativesDataSink(
+                base_directory=output_dir,
+                desc='MELODIC',
+                suffix='mixing',
+                dismiss_entities=("echo",),
+            ),
+            name="ds_melodic_mix",
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
         ds_aroma_std = pe.Node(
             DerivativesDataSink(
-                base_directory=output_dir, space='MNI152NLin6Asym', desc='smoothAROMAnonaggr',
-                compress=True, TaskName=metadata.get('TaskName'), **timing_parameters),
-            name='ds_aroma_std', run_without_submitting=True,
-            mem_gb=DEFAULT_MEMORY_MIN_GB)
-
+                base_directory=output_dir,
+                space='MNI152NLin6Asym',
+                desc='smoothAROMAnonaggr',
+                compress=True,
+                TaskName=metadata.get('TaskName'),
+                **timing_parameters,
+            ),
+            name='ds_aroma_std',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
+        # fmt:off
         workflow.connect([
             (inputnode, ds_aroma_noise_ics, [('source_file', 'source_file'),
                                              ('aroma_noise_ics', 'in_file')]),
@@ -365,6 +526,7 @@ def init_func_derivatives_wf(
             (inputnode, ds_aroma_std, [('source_file', 'source_file'),
                                        ('nonaggr_denoised_file', 'in_file')]),
         ])
+        # fmt:on
 
     if getattr(spaces, '_cached') is None:
         return workflow
@@ -373,33 +535,59 @@ def init_func_derivatives_wf(
     if spaces.cached.references:
         from niworkflows.interfaces.space import SpaceDataSource
 
-        spacesource = pe.Node(SpaceDataSource(),
-                              name='spacesource', run_without_submitting=True)
-        spacesource.iterables = ('in_tuple', [
-            (s.fullname, s.spec) for s in spaces.cached.get_standard(dim=(3,))
-        ])
+        spacesource = pe.Node(SpaceDataSource(), name='spacesource', run_without_submitting=True)
+        spacesource.iterables = (
+            'in_tuple',
+            [(s.fullname, s.spec) for s in spaces.cached.get_standard(dim=(3,))],
+        )
 
         fields = ['template', 'bold_std', 'bold_std_ref', 'bold_mask_std']
         if multiecho:
             fields.append('t2star_std')
-        select_std = pe.Node(KeySelect(fields=fields),
-                             name='select_std', run_without_submitting=True,
-                             mem_gb=DEFAULT_MEMORY_MIN_GB)
+        select_std = pe.Node(
+            KeySelect(fields=fields),
+            name='select_std',
+            run_without_submitting=True,
+            mem_gb=DEFAULT_MEMORY_MIN_GB,
+        )
 
         ds_bold_std = pe.Node(
             DerivativesDataSink(
-                base_directory=output_dir, desc='preproc', compress=True, SkullStripped=masked,
-                TaskName=metadata.get('TaskName'),
**timing_parameters), - name='ds_bold_std', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) + base_directory=output_dir, + desc='preproc', + compress=True, + SkullStripped=masked, + TaskName=metadata.get('TaskName'), + **timing_parameters, + ), + name='ds_bold_std', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) ds_bold_std_ref = pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix='boldref', compress=True, - dismiss_entities=("echo",)), - name='ds_bold_std_ref', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) + DerivativesDataSink( + base_directory=output_dir, + suffix='boldref', + compress=True, + dismiss_entities=("echo",), + ), + name='ds_bold_std_ref', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) ds_bold_mask_std = pe.Node( - DerivativesDataSink(base_directory=output_dir, desc='brain', suffix='mask', - compress=True, dismiss_entities=("echo",)), - name='ds_bold_mask_std', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) - + DerivativesDataSink( + base_directory=output_dir, + desc='brain', + suffix='mask', + compress=True, + dismiss_entities=("echo",), + ), + name='ds_bold_mask_std', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + # fmt:off workflow.connect([ (inputnode, ds_bold_std, [('source_file', 'source_file')]), (inputnode, ds_bold_std_ref, [('source_file', 'source_file')]), @@ -428,21 +616,39 @@ def init_func_derivatives_wf( ('density', 'density')]), (raw_sources, ds_bold_mask_std, [('out', 'RawSources')]), ]) - + # fmt:on if freesurfer: - select_fs_std = pe.Node(KeySelect( - fields=['bold_aseg_std', 'bold_aparc_std', 'template']), - name='select_fs_std', run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) - ds_bold_aseg_std = pe.Node(DerivativesDataSink( - base_directory=output_dir, desc='aseg', suffix='dseg', compress=True, - dismiss_entities=("echo",)), - name='ds_bold_aseg_std', run_without_submitting=True, - mem_gb=DEFAULT_MEMORY_MIN_GB) - ds_bold_aparc_std = pe.Node(DerivativesDataSink( - base_directory=output_dir, desc='aparcaseg', suffix='dseg', compress=True, - dismiss_entities=("echo",)), - name='ds_bold_aparc_std', run_without_submitting=True, - mem_gb=DEFAULT_MEMORY_MIN_GB) + select_fs_std = pe.Node( + KeySelect(fields=['bold_aseg_std', 'bold_aparc_std', 'template']), + name='select_fs_std', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + ds_bold_aseg_std = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + desc='aseg', + suffix='dseg', + compress=True, + dismiss_entities=("echo",), + ), + name='ds_bold_aseg_std', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + ds_bold_aparc_std = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + desc='aparcaseg', + suffix='dseg', + compress=True, + dismiss_entities=("echo",), + ), + name='ds_bold_aparc_std', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + # fmt:off workflow.connect([ (spacesource, select_fs_std, [('uid', 'key')]), (inputnode, select_fs_std, [('bold_aseg_std', 'bold_aseg_std'), @@ -462,15 +668,21 @@ def init_func_derivatives_wf( (inputnode, ds_bold_aseg_std, [('source_file', 'source_file')]), (inputnode, ds_bold_aparc_std, [('source_file', 'source_file')]) ]) - + # fmt:on if multiecho: ds_t2star_std = pe.Node( - DerivativesDataSink(base_directory=output_dir, suffix='T2starmap', - compress=True, dismiss_entities=("echo",), - **t2star_meta), - name='ds_t2star_std', run_without_submitting=True, - 
mem_gb=DEFAULT_MEMORY_MIN_GB) - + DerivativesDataSink( + base_directory=output_dir, + suffix='T2starmap', + compress=True, + dismiss_entities=("echo",), + **t2star_meta, + ), + name='ds_t2star_std', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + # fmt:off workflow.connect([ (inputnode, ds_t2star_std, [('source_file', 'source_file')]), (select_std, ds_t2star_std, [('t2star_std', 'in_file')]), @@ -480,27 +692,41 @@ def init_func_derivatives_wf( ('density', 'density')]), (raw_sources, ds_t2star_std, [('out', 'RawSources')]), ]) + # fmt:on fs_outputs = spaces.cached.get_fs_spaces() if freesurfer and fs_outputs: from niworkflows.interfaces.surf import Path2BIDS - select_fs_surf = pe.Node(KeySelect( - fields=['surfaces', 'surf_kwargs']), name='select_fs_surf', - run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) + select_fs_surf = pe.Node( + KeySelect(fields=['surfaces', 'surf_kwargs']), + name='select_fs_surf', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) select_fs_surf.iterables = [('key', fs_outputs)] select_fs_surf.inputs.surf_kwargs = [{'space': s} for s in fs_outputs] - name_surfs = pe.MapNode(Path2BIDS(pattern=r'(?P<hemi>[lr])h.\w+'), - iterfield='in_file', name='name_surfs', - run_without_submitting=True) - - ds_bold_surfs = pe.MapNode(DerivativesDataSink( - base_directory=output_dir, extension=".func.gii", - TaskName=metadata.get('TaskName'), **timing_parameters), - iterfield=['in_file', 'hemi'], name='ds_bold_surfs', - run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) + name_surfs = pe.MapNode( + Path2BIDS(pattern=r'(?P<hemi>[lr])h.\w+'), + iterfield='in_file', + name='name_surfs', + run_without_submitting=True, + ) + ds_bold_surfs = pe.MapNode( + DerivativesDataSink( + base_directory=output_dir, + extension=".func.gii", + TaskName=metadata.get('TaskName'), + **timing_parameters, + ), + iterfield=['in_file', 'hemi'], + name='ds_bold_surfs', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + # fmt:off workflow.connect([ (inputnode, select_fs_surf, [ ('surf_files', 'surfaces'), @@ -511,14 +737,23 @@ def init_func_derivatives_wf( ('key', 'space')]), (name_surfs, ds_bold_surfs, [('hemi', 'hemi')]), ]) + # fmt:on # CIFTI output if cifti_output: - ds_bold_cifti = pe.Node(DerivativesDataSink( - base_directory=output_dir, suffix='bold', compress=False, - TaskName=metadata.get('TaskName'), **timing_parameters), - name='ds_bold_cifti', run_without_submitting=True, - mem_gb=DEFAULT_MEMORY_MIN_GB) + ds_bold_cifti = pe.Node( + DerivativesDataSink( + base_directory=output_dir, + suffix='bold', + compress=False, + TaskName=metadata.get('TaskName'), + **timing_parameters, + ), + name='ds_bold_cifti', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + # fmt:off workflow.connect([ (inputnode, ds_bold_cifti, [(('bold_cifti', _unlist), 'in_file'), ('source_file', 'source_file'), @@ -526,23 +761,34 @@ def init_func_derivatives_wf( ('cifti_density', 'density'), (('cifti_metadata', _read_json), 'meta_dict')]) ]) + # fmt:on if "compcor" in config.execution.debug: ds_acompcor_masks = pe.Node( DerivativesDataSink( - base_directory=output_dir, desc=[f"CompCor{_}" for _ in "CWA"], - suffix="mask", compress=True), - name="ds_acompcor_masks", run_without_submitting=True) + base_directory=output_dir, + desc=[f"CompCor{_}" for _ in "CWA"], + suffix="mask", + compress=True, + ), + name="ds_acompcor_masks", + run_without_submitting=True, + ) ds_tcompcor_mask = pe.Node( DerivativesDataSink( - base_directory=output_dir, desc="CompCorT",
suffix="mask", compress=True), - name="ds_tcompcor_mask", run_without_submitting=True) + base_directory=output_dir, desc="CompCorT", suffix="mask", compress=True + ), + name="ds_tcompcor_mask", + run_without_submitting=True, + ) + # fmt:off workflow.connect([ (inputnode, ds_acompcor_masks, [("acompcor_masks", "in_file"), ("source_file", "source_file")]), (inputnode, ds_tcompcor_mask, [("tcompcor_mask", "in_file"), ("source_file", "source_file")]), ]) + # fmt:on return workflow @@ -585,25 +831,31 @@ def init_bold_preproc_report_wf(mem_gb, reportlets_dir, name='bold_preproc_repor from nipype.algorithms.confounds import TSNR from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.reportlets.registration import SimpleBeforeAfterRPT + from ...interfaces import DerivativesDataSink workflow = Workflow(name=name) - inputnode = pe.Node(niu.IdentityInterface( - fields=['in_pre', 'in_post', 'name_source']), name='inputnode') + inputnode = pe.Node( + niu.IdentityInterface(fields=['in_pre', 'in_post', 'name_source']), name='inputnode' + ) pre_tsnr = pe.Node(TSNR(), name='pre_tsnr', mem_gb=mem_gb * 4.5) pos_tsnr = pe.Node(TSNR(), name='pos_tsnr', mem_gb=mem_gb * 4.5) - bold_rpt = pe.Node(SimpleBeforeAfterRPT(), name='bold_rpt', - mem_gb=0.1) + bold_rpt = pe.Node(SimpleBeforeAfterRPT(), name='bold_rpt', mem_gb=0.1) ds_report_bold = pe.Node( - DerivativesDataSink(base_directory=reportlets_dir, desc='preproc', - datatype="figures", dismiss_entities=("echo",)), - name='ds_report_bold', mem_gb=DEFAULT_MEMORY_MIN_GB, - run_without_submitting=True + DerivativesDataSink( + base_directory=reportlets_dir, + desc='preproc', + datatype="figures", + dismiss_entities=("echo",), + ), + name='ds_report_bold', + mem_gb=DEFAULT_MEMORY_MIN_GB, + run_without_submitting=True, ) - + # fmt:off workflow.connect([ (inputnode, ds_report_bold, [('name_source', 'source_file')]), (inputnode, pre_tsnr, [('in_pre', 'in_file')]), @@ -612,6 +864,7 @@ def init_bold_preproc_report_wf(mem_gb, reportlets_dir, name='bold_preproc_repor (pos_tsnr, bold_rpt, [('stddev_file', 'after')]), (bold_rpt, ds_report_bold, [('out_report', 'in_file')]), ]) + # fmt:on return workflow @@ -623,12 +876,14 @@ def _unlist(in_file): def _get_surface(in_file): - from pathlib import Path from json import loads + from pathlib import Path + return loads(Path(in_file).read_text())["surface"] def _read_json(in_file): - from pathlib import Path from json import loads + from pathlib import Path + return loads(Path(in_file).read_text()) diff --git a/fmriprep/workflows/bold/registration.py b/fmriprep/workflows/bold/registration.py index 3585465ec..3b7ce92fa 100644 --- a/fmriprep/workflows/bold/registration.py +++ b/fmriprep/workflows/bold/registration.py @@ -30,16 +30,15 @@ .. autofunction:: init_fsl_bbr_wf """ -from ... import config - import os import os.path as op import pkg_resources as pkgr - +from nipype.interfaces import c3, fsl +from nipype.interfaces import utility as niu from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu, fsl, c3 +from ... 
import config from ...interfaces import DerivativesDataSink DEFAULT_MEMORY_MIN_GB = config.DEFAULT_MEMORY_MIN_GB @@ -47,16 +46,16 @@ def init_bold_reg_wf( - freesurfer, - use_bbr, - bold2t1w_dof, - bold2t1w_init, - mem_gb, - omp_nthreads, - name='bold_reg_wf', - sloppy=False, - use_compression=True, - write_report=True, + freesurfer, + use_bbr, + bold2t1w_dof, + bold2t1w_init, + mem_gb, + omp_nthreads, + name='bold_reg_wf', + sloppy=False, + use_compression=True, + write_report=True, ): """ Build a workflow to run same-subject, BOLD-to-T1w image-registration. @@ -144,25 +143,39 @@ def init_bold_reg_wf( workflow = Workflow(name=name) inputnode = pe.Node( niu.IdentityInterface( - fields=['ref_bold_brain', 't1w_brain', 't1w_dseg', - 'subjects_dir', 'subject_id', 'fsnative2t1w_xfm']), - name='inputnode' + fields=[ + 'ref_bold_brain', + 't1w_brain', + 't1w_dseg', + 'subjects_dir', + 'subject_id', + 'fsnative2t1w_xfm', + ] + ), + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'itk_bold_to_t1', 'itk_t1_to_bold', 'fallback']), - name='outputnode' + niu.IdentityInterface(fields=['itk_bold_to_t1', 'itk_t1_to_bold', 'fallback']), + name='outputnode', ) if freesurfer: - bbr_wf = init_bbreg_wf(use_bbr=use_bbr, bold2t1w_dof=bold2t1w_dof, - bold2t1w_init=bold2t1w_init, omp_nthreads=omp_nthreads) + bbr_wf = init_bbreg_wf( + use_bbr=use_bbr, + bold2t1w_dof=bold2t1w_dof, + bold2t1w_init=bold2t1w_init, + omp_nthreads=omp_nthreads, + ) else: - bbr_wf = init_fsl_bbr_wf(use_bbr=use_bbr, bold2t1w_dof=bold2t1w_dof, - bold2t1w_init=bold2t1w_init, sloppy=sloppy, - omp_nthreads=omp_nthreads) - + bbr_wf = init_fsl_bbr_wf( + use_bbr=use_bbr, + bold2t1w_dof=bold2t1w_dof, + bold2t1w_init=bold2t1w_init, + sloppy=sloppy, + omp_nthreads=omp_nthreads, + ) + # fmt:off workflow.connect([ (inputnode, bbr_wf, [ ('ref_bold_brain', 'inputnode.in_file'), @@ -175,29 +188,35 @@ def init_bold_reg_wf( ('outputnode.itk_t1_to_bold', 'itk_t1_to_bold'), ('outputnode.fallback', 'fallback')]), ]) + # fmt:on if write_report: ds_report_reg = pe.Node( DerivativesDataSink(datatype="figures", dismiss_entities=("echo",)), - name='ds_report_reg', run_without_submitting=True, - mem_gb=DEFAULT_MEMORY_MIN_GB) + name='ds_report_reg', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) def _bold_reg_suffix(fallback, freesurfer): if fallback: return 'coreg' if freesurfer else 'flirtnobbr' return 'bbregister' if freesurfer else 'flirtbbr' + # fmt:off workflow.connect([ (bbr_wf, ds_report_reg, [ ('outputnode.out_report', 'in_file'), (('outputnode.fallback', _bold_reg_suffix, freesurfer), 'desc')]), ]) + # fmt:on return workflow -def init_bold_t1_trans_wf(freesurfer, mem_gb, omp_nthreads, use_compression=True, - name='bold_t1_trans_wf'): +def init_bold_t1_trans_wf( + freesurfer, mem_gb, omp_nthreads, use_compression=True, name='bold_t1_trans_wf' +): """ Co-register the reference BOLD image to T1w-space. 
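The `# fmt:off` / `# fmt:on` pairs introduced throughout this patch are black's skip sentinels: black leaves the enclosed region untouched, which preserves the column-aligned `workflow.connect` lists that nipype code conventionally uses. A minimal sketch of the device follows; it is illustrative only (the `demo_wf`, `src`, and `dst` names are hypothetical and not part of this patch):

    from nipype.interfaces import utility as niu
    from nipype.pipeline import engine as pe

    demo_wf = pe.Workflow(name='demo_wf')  # hypothetical demo workflow
    src = pe.Node(niu.IdentityInterface(fields=['x']), name='src')
    dst = pe.Node(niu.IdentityInterface(fields=['x']), name='dst')
    # fmt:off
    demo_wf.connect([
        (src, dst, [('x', 'x')]),  # black will not reflow this aligned list
    ])
    # fmt:on

Black accepts both the spaced (`# fmt: off`) and unspaced spellings; the patch standardizes on the unspaced form.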
@@ -276,37 +295,50 @@ def init_bold_t1_trans_wf(freesurfer, mem_gb, omp_nthreads, use_compression=True * :py:func:`~fmriprep.workflows.bold.registration.init_fsl_bbr_wf` """ - from fmriprep.interfaces.maths import Clip from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.func.util import init_bold_reference_wf from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.itk import MultiApplyTransforms - from niworkflows.interfaces.nilearn import Merge from niworkflows.interfaces.nibabel import GenerateSamplingReference + from niworkflows.interfaces.nilearn import Merge + + from fmriprep.interfaces.maths import Clip workflow = Workflow(name=name) inputnode = pe.Node( niu.IdentityInterface( - fields=['name_source', 'ref_bold_brain', 'ref_bold_mask', - 't1w_brain', 't1w_mask', 't1w_aseg', 't1w_aparc', - 'bold_split', 'fieldwarp', 'hmc_xforms', - 'itk_bold_to_t1']), - name='inputnode' + fields=[ + 'name_source', + 'ref_bold_brain', + 'ref_bold_mask', + 't1w_brain', + 't1w_mask', + 't1w_aseg', + 't1w_aparc', + 'bold_split', + 'fieldwarp', + 'hmc_xforms', + 'itk_bold_to_t1', + ] + ), + name='inputnode', ) outputnode = pe.Node( - niu.IdentityInterface(fields=[ - 'bold_t1', 'bold_t1_ref', 'bold_mask_t1', - 'bold_aseg_t1', 'bold_aparc_t1']), - name='outputnode' + niu.IdentityInterface( + fields=['bold_t1', 'bold_t1_ref', 'bold_mask_t1', 'bold_aseg_t1', 'bold_aparc_t1'] + ), + name='outputnode', ) - gen_ref = pe.Node(GenerateSamplingReference(), name='gen_ref', - mem_gb=0.3) # 256x256x256 * 64 / 8 ~ 150MB - - mask_t1w_tfm = pe.Node(ApplyTransforms(interpolation='MultiLabel'), - name='mask_t1w_tfm', mem_gb=0.1) + gen_ref = pe.Node( + GenerateSamplingReference(), name='gen_ref', mem_gb=0.3 + ) # 256x256x256 * 64 / 8 ~ 150MB + mask_t1w_tfm = pe.Node( + ApplyTransforms(interpolation='MultiLabel'), name='mask_t1w_tfm', mem_gb=0.1 + ) + # fmt:off workflow.connect([ (inputnode, gen_ref, [('ref_bold_brain', 'moving_image'), ('t1w_brain', 'fixed_image'), @@ -316,16 +348,20 @@ def init_bold_t1_trans_wf(freesurfer, mem_gb, omp_nthreads, use_compression=True (inputnode, mask_t1w_tfm, [('itk_bold_to_t1', 'transforms')]), (mask_t1w_tfm, outputnode, [('output_image', 'bold_mask_t1')]), ]) - + # fmt:on if freesurfer: # Resample aseg and aparc in T1w space (no transforms needed) aseg_t1w_tfm = pe.Node( ApplyTransforms(interpolation='MultiLabel', transforms='identity'), - name='aseg_t1w_tfm', mem_gb=0.1) + name='aseg_t1w_tfm', + mem_gb=0.1, + ) aparc_t1w_tfm = pe.Node( ApplyTransforms(interpolation='MultiLabel', transforms='identity'), - name='aparc_t1w_tfm', mem_gb=0.1) - + name='aparc_t1w_tfm', + mem_gb=0.1, + ) + # fmt:off workflow.connect([ (inputnode, aseg_t1w_tfm, [('t1w_aseg', 'input_image')]), (inputnode, aparc_t1w_tfm, [('t1w_aparc', 'input_image')]), @@ -334,17 +370,19 @@ def init_bold_t1_trans_wf(freesurfer, mem_gb, omp_nthreads, use_compression=True (aseg_t1w_tfm, outputnode, [('output_image', 'bold_aseg_t1')]), (aparc_t1w_tfm, outputnode, [('output_image', 'bold_aparc_t1')]), ]) + # fmt:on bold_to_t1w_transform = pe.Node( MultiApplyTransforms(interpolation="LanczosWindowedSinc", float=True, copy_dtype=True), - name='bold_to_t1w_transform', mem_gb=mem_gb * 3 * omp_nthreads, n_procs=omp_nthreads) + name='bold_to_t1w_transform', + mem_gb=mem_gb * 3 * omp_nthreads, + n_procs=omp_nthreads, + ) # Interpolation can occasionally produce below-zero values as an artifact threshold = pe.MapNode( - Clip(minimum=0), - 
name="threshold", - iterfield=['in_file'], - mem_gb=DEFAULT_MEMORY_MIN_GB) + Clip(minimum=0), name="threshold", iterfield=['in_file'], mem_gb=DEFAULT_MEMORY_MIN_GB + ) # merge 3D volumes into 4D timeseries merge = pe.Node(Merge(compress=use_compression), name='merge', mem_gb=mem_gb) @@ -353,9 +391,13 @@ def init_bold_t1_trans_wf(freesurfer, mem_gb, omp_nthreads, use_compression=True gen_final_ref = init_bold_reference_wf(omp_nthreads, pre_mask=True) # Merge transforms placing the head motion correction last - merge_xforms = pe.Node(niu.Merge(3), name='merge_xforms', - run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB) - + merge_xforms = pe.Node( + niu.Merge(3), + name='merge_xforms', + run_without_submitting=True, + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + # fmt:off workflow.connect([ (inputnode, merge, [('name_source', 'header_source')]), (inputnode, merge_xforms, [ @@ -372,6 +414,7 @@ def init_bold_t1_trans_wf(freesurfer, mem_gb, omp_nthreads, use_compression=True (merge, outputnode, [('out_file', 'bold_t1')]), (gen_final_ref, outputnode, [('outputnode.ref_image', 'bold_t1_ref')]), ]) + # fmt:on return workflow @@ -446,12 +489,11 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre """ from niworkflows.engine.workflows import LiterateWorkflow as Workflow + # See https://github.com/nipreps/fmriprep/issues/768 - from niworkflows.interfaces.freesurfer import ( - PatchedBBRegisterRPT as BBRegisterRPT, - PatchedMRICoregRPT as MRICoregRPT, - PatchedLTAConvert as LTAConvert - ) + from niworkflows.interfaces.freesurfer import PatchedBBRegisterRPT as BBRegisterRPT + from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert + from niworkflows.interfaces.freesurfer import PatchedMRICoregRPT as MRICoregRPT from niworkflows.interfaces.nitransforms import ConcatenateXFMs workflow = Workflow(name=name) @@ -459,19 +501,30 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre The BOLD reference was then co-registered to the T1w reference using `bbregister` (FreeSurfer) which implements boundary-based registration [@bbr]. Co-registration was configured with {dof} degrees of freedom{reason}. 
-""".format(dof={6: 'six', 9: 'nine', 12: 'twelve'}[bold2t1w_dof], - reason='' if bold2t1w_dof == 6 else - 'to account for distortions remaining in the BOLD reference') +""".format( + dof={6: 'six', 9: 'nine', 12: 'twelve'}[bold2t1w_dof], + reason='' + if bold2t1w_dof == 6 + else 'to account for distortions remaining in the BOLD reference', + ) inputnode = pe.Node( - niu.IdentityInterface([ - 'in_file', - 'fsnative2t1w_xfm', 'subjects_dir', 'subject_id', # BBRegister - 't1w_dseg', 't1w_brain']), # FLIRT BBR - name='inputnode') + niu.IdentityInterface( + [ + 'in_file', + 'fsnative2t1w_xfm', + 'subjects_dir', + 'subject_id', # BBRegister + 't1w_dseg', + 't1w_brain', + ] + ), # FLIRT BBR + name='inputnode', + ) outputnode = pe.Node( niu.IdentityInterface(['itk_bold_to_t1', 'itk_t1_to_bold', 'out_report', 'fallback']), - name='outputnode') + name='outputnode', + ) if bold2t1w_init not in ("register", "header"): raise ValueError(f"Unknown BOLD-T1w initialization option: {bold2t1w_init}") @@ -487,9 +540,13 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre # Define both nodes, but only connect conditionally mri_coreg = pe.Node( - MRICoregRPT(dof=bold2t1w_dof, sep=[4], ftol=0.0001, linmintol=0.01, - generate_report=not use_bbr), - name='mri_coreg', n_procs=omp_nthreads, mem_gb=5) + MRICoregRPT( + dof=bold2t1w_dof, sep=[4], ftol=0.0001, linmintol=0.01, generate_report=not use_bbr + ), + name='mri_coreg', + n_procs=omp_nthreads, + mem_gb=5, + ) bbregister = pe.Node( BBRegisterRPT( @@ -497,26 +554,26 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre contrast_type='t2', registered_file=True, out_lta_file=True, - generate_report=True + generate_report=True, ), - name='bbregister', mem_gb=12 + name='bbregister', + mem_gb=12, ) if bold2t1w_init == "header": bbregister.inputs.init = "header" transforms = pe.Node(niu.Merge(2), run_without_submitting=True, name='transforms') - lta_ras2ras = pe.MapNode(LTAConvert(out_lta=True), iterfield=['in_lta'], - name='lta_ras2ras', mem_gb=2) + lta_ras2ras = pe.MapNode( + LTAConvert(out_lta=True), iterfield=['in_lta'], name='lta_ras2ras', mem_gb=2 + ) # In cases where Merge(2) only has `in1` or `in2` defined # output list will just contain a single element select_transform = pe.Node( - niu.Select(index=0), - run_without_submitting=True, - name='select_transform' + niu.Select(index=0), run_without_submitting=True, name='select_transform' ) merge_ltas = pe.Node(niu.Merge(2), name='merge_ltas', run_without_submitting=True) concat_xfm = pe.Node(ConcatenateXFMs(inverse=True), name='concat_xfm') - + # fmt:off workflow.connect([ (inputnode, merge_ltas, [('fsnative2t1w_xfm', 'in2')]), # Wire up the co-registration alternatives @@ -527,21 +584,20 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre (concat_xfm, outputnode, [('out_xfm', 'itk_bold_to_t1')]), (concat_xfm, outputnode, [('out_inv', 'itk_t1_to_bold')]), ]) - + # fmt:on # Do not initialize with header, use mri_coreg if bold2t1w_init == "register": + # fmt:off workflow.connect([ (inputnode, mri_coreg, [('subjects_dir', 'subjects_dir'), ('subject_id', 'subject_id'), ('in_file', 'source_file')]), (mri_coreg, transforms, [('out_lta_file', 'in2')]), ]) - + # fmt:on # Short-circuit workflow building, use initial registration if use_bbr is False: - workflow.connect([ - (mri_coreg, outputnode, [('out_report', 'out_report')]), - ]) + workflow.connect(mri_coreg, 'out_report', outputnode, 'out_report') outputnode.inputs.fallback = 
True return workflow @@ -550,18 +606,17 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre workflow.connect(mri_coreg, 'out_lta_file', bbregister, 'init_reg_file') # Use bbregister + # fmt:off workflow.connect([ (inputnode, bbregister, [('subjects_dir', 'subjects_dir'), ('subject_id', 'subject_id'), ('in_file', 'source_file')]), (bbregister, transforms, [('out_lta_file', 'in1')]), ]) - + # fmt:on # Short-circuit workflow building, use boundary-based registration if use_bbr is True: - workflow.connect([ - (bbregister, outputnode, [('out_report', 'out_report')]), - ]) + workflow.connect(bbregister, 'out_report', outputnode, 'out_report') outputnode.inputs.fallback = False return workflow @@ -571,7 +626,7 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre compare_transforms = pe.Node(niu.Function(function=compare_xforms), name='compare_transforms') select_report = pe.Node(niu.Select(), run_without_submitting=True, name='select_report') - + # fmt:off workflow.connect([ # Normalize LTA transforms to RAS2RAS (inputs are VOX2VOX) and compare (lta_ras2ras, compare_transforms, [('out_lta', 'lta_list')]), @@ -585,12 +640,14 @@ def init_bbreg_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, name='bbre (compare_transforms, select_report, [('out', 'index')]), (select_report, outputnode, [('out', 'out_report')]), ]) + # fmt:on return workflow -def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=False, - name='fsl_bbr_wf'): +def init_fsl_bbr_wf( + use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=False, name='fsl_bbr_wf' +): """ Build a workflow to run FSL's ``flirt``. @@ -658,32 +715,42 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=F """ from niworkflows.engine.workflows import LiterateWorkflow as Workflow - from niworkflows.utils.images import dseg_label as _dseg_label - from niworkflows.interfaces.freesurfer import ( - PatchedLTAConvert as LTAConvert, - PatchedMRICoregRPT as MRICoregRPT, - ) + from niworkflows.interfaces.freesurfer import PatchedLTAConvert as LTAConvert + from niworkflows.interfaces.freesurfer import PatchedMRICoregRPT as MRICoregRPT from niworkflows.interfaces.reportlets.registration import FLIRTRPT + from niworkflows.utils.images import dseg_label as _dseg_label + workflow = Workflow(name=name) workflow.__desc__ = """\ The BOLD reference was then co-registered to the T1w reference using `mri_coreg` (FreeSurfer) followed by `flirt` [FSL {fsl_ver}, @flirt] with the boundary-based registration [@bbr] cost-function. Co-registration was configured with {dof} degrees of freedom{reason}. 
-""".format(fsl_ver=FLIRTRPT().version or '', - dof={6: 'six', 9: 'nine', 12: 'twelve'}[bold2t1w_dof], - reason='' if bold2t1w_dof == 6 else - 'to account for distortions remaining in the BOLD reference') +""".format( + fsl_ver=FLIRTRPT().version or '', + dof={6: 'six', 9: 'nine', 12: 'twelve'}[bold2t1w_dof], + reason='' + if bold2t1w_dof == 6 + else 'to account for distortions remaining in the BOLD reference', + ) inputnode = pe.Node( - niu.IdentityInterface([ - 'in_file', - 'fsnative2t1w_xfm', 'subjects_dir', 'subject_id', # BBRegister - 't1w_dseg', 't1w_brain']), # FLIRT BBR - name='inputnode') + niu.IdentityInterface( + [ + 'in_file', + 'fsnative2t1w_xfm', + 'subjects_dir', + 'subject_id', # BBRegister + 't1w_dseg', + 't1w_brain', + ] + ), # FLIRT BBR + name='inputnode', + ) outputnode = pe.Node( niu.IdentityInterface(['itk_bold_to_t1', 'itk_t1_to_bold', 'out_report', 'fallback']), - name='outputnode') + name='outputnode', + ) wm_mask = pe.Node(niu.Function(function=_dseg_label), name='wm_mask') wm_mask.inputs.label = 2 # BIDS default is WM=2 @@ -695,26 +762,33 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=F raise NotImplementedError("Header-based registration initialization not supported for FSL") mri_coreg = pe.Node( - MRICoregRPT(dof=bold2t1w_dof, sep=[4], ftol=0.0001, linmintol=0.01, - generate_report=not use_bbr), - name='mri_coreg', n_procs=omp_nthreads, mem_gb=5) + MRICoregRPT( + dof=bold2t1w_dof, sep=[4], ftol=0.0001, linmintol=0.01, generate_report=not use_bbr + ), + name='mri_coreg', + n_procs=omp_nthreads, + mem_gb=5, + ) - lta_to_fsl = pe.Node(LTAConvert(out_fsl=True), name='lta_to_fsl', - mem_gb=DEFAULT_MEMORY_MIN_GB) - workflow.connect([ - (mri_coreg, lta_to_fsl, [('out_lta_file', 'in_lta')]), - ]) + lta_to_fsl = pe.Node(LTAConvert(out_fsl=True), name='lta_to_fsl', mem_gb=DEFAULT_MEMORY_MIN_GB) - invt_bbr = pe.Node(fsl.ConvertXFM(invert_xfm=True), name='invt_bbr', - mem_gb=DEFAULT_MEMORY_MIN_GB) + invt_bbr = pe.Node( + fsl.ConvertXFM(invert_xfm=True), name='invt_bbr', mem_gb=DEFAULT_MEMORY_MIN_GB + ) # BOLD to T1 transform matrix is from fsl, using c3 tools to convert to # something ANTs will like. 
- fsl2itk_fwd = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True), - name='fsl2itk_fwd', mem_gb=DEFAULT_MEMORY_MIN_GB) - fsl2itk_inv = pe.Node(c3.C3dAffineTool(fsl2ras=True, itk_transform=True), - name='fsl2itk_inv', mem_gb=DEFAULT_MEMORY_MIN_GB) - + fsl2itk_fwd = pe.Node( + c3.C3dAffineTool(fsl2ras=True, itk_transform=True), + name='fsl2itk_fwd', + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + fsl2itk_inv = pe.Node( + c3.C3dAffineTool(fsl2ras=True, itk_transform=True), + name='fsl2itk_inv', + mem_gb=DEFAULT_MEMORY_MIN_GB, + ) + # fmt:off workflow.connect([ (inputnode, mri_coreg, [('in_file', 'source_file'), ('t1w_brain', 'reference_file')]), @@ -722,25 +796,30 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=F ('in_file', 'source_file')]), (inputnode, fsl2itk_inv, [('in_file', 'reference_file'), ('t1w_brain', 'source_file')]), + (mri_coreg, lta_to_fsl, [('out_lta_file', 'in_lta')]), (invt_bbr, fsl2itk_inv, [('out_file', 'transform_file')]), (fsl2itk_fwd, outputnode, [('itk_transform', 'itk_bold_to_t1')]), (fsl2itk_inv, outputnode, [('itk_transform', 'itk_t1_to_bold')]), ]) + # fmt:on # Short-circuit workflow building, use rigid registration if use_bbr is False: + # fmt:off workflow.connect([ (lta_to_fsl, invt_bbr, [('out_fsl', 'in_file')]), (lta_to_fsl, fsl2itk_fwd, [('out_fsl', 'transform_file')]), (mri_coreg, outputnode, [('out_report', 'out_report')]), ]) + # fmt:on outputnode.inputs.fallback = True return workflow flt_bbr = pe.Node( FLIRTRPT(cost_func='bbr', dof=bold2t1w_dof, args="-basescale 1", generate_report=True), - name='flt_bbr') + name='flt_bbr', + ) FSLDIR = os.getenv('FSLDIR') if FSLDIR: @@ -749,36 +828,45 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=F # Should mostly be hit while building docs LOGGER.warning("FSLDIR unset - using packaged BBR schedule") flt_bbr.inputs.schedule = pkgr.resource_filename('fmriprep', 'data/flirtsch/bbr.sch') - + # fmt:off workflow.connect([ (inputnode, wm_mask, [('t1w_dseg', 'in_seg')]), (inputnode, flt_bbr, [('in_file', 'in_file')]), (lta_to_fsl, flt_bbr, [('out_fsl', 'in_matrix_file')]), ]) - + # fmt:on if sloppy is True: - downsample = pe.Node(niu.Function( - function=_conditional_downsampling, output_names=["out_file", "out_mask"]), - name='downsample') + downsample = pe.Node( + niu.Function( + function=_conditional_downsampling, output_names=["out_file", "out_mask"] + ), + name='downsample', + ) + # fmt:off workflow.connect([ (inputnode, downsample, [("t1w_brain", "in_file")]), (wm_mask, downsample, [("out", "in_mask")]), (downsample, flt_bbr, [('out_file', 'reference'), ('out_mask', 'wm_seg')]), ]) + # fmt:on else: + # fmt:off workflow.connect([ (inputnode, flt_bbr, [('t1w_brain', 'reference')]), (wm_mask, flt_bbr, [('out', 'wm_seg')]), ]) + # fmt:on # Short-circuit workflow building, use boundary-based registration if use_bbr is True: + # fmt:off workflow.connect([ (flt_bbr, invt_bbr, [('out_matrix_file', 'in_file')]), (flt_bbr, fsl2itk_fwd, [('out_matrix_file', 'transform_file')]), (flt_bbr, outputnode, [('out_report', 'out_report')]), ]) + # fmt:on outputnode.inputs.fallback = False return workflow @@ -791,9 +879,8 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=F select_transform = pe.Node(niu.Select(), run_without_submitting=True, name='select_transform') select_report = pe.Node(niu.Select(), run_without_submitting=True, name='select_report') - fsl_to_lta = pe.MapNode(LTAConvert(out_lta=True), iterfield=['in_fsl'], - 
name='fsl_to_lta') - + fsl_to_lta = pe.MapNode(LTAConvert(out_lta=True), iterfield=['in_fsl'], name='fsl_to_lta') + # fmt:off workflow.connect([ (flt_bbr, transforms, [('out_matrix_file', 'in1')]), (lta_to_fsl, transforms, [('out_fsl', 'in2')]), @@ -814,6 +901,7 @@ def init_fsl_bbr_wf(use_bbr, bold2t1w_dof, bold2t1w_init, omp_nthreads, sloppy=F (compare_transforms, select_report, [('out', 'index')]), (select_report, outputnode, [('out', 'out_report')]), ]) + # fmt:on return workflow @@ -850,8 +938,8 @@ def compare_xforms(lta_list, norm_threshold=15): second transform relative to the first (default: `15`) """ - from niworkflows.interfaces.surf import load_transform from nipype.algorithms.rapidart import _calc_norm_affine + from niworkflows.interfaces.surf import load_transform bbr_affine = load_transform(lta_list[0]) fallback_affine = load_transform(lta_list[1]) @@ -864,9 +952,10 @@ def compare_xforms(lta_list, norm_threshold=15): def _conditional_downsampling(in_file, in_mask, zoom_th=4.0): """Downsamples the input dataset for sloppy mode.""" from pathlib import Path - import numpy as np + import nibabel as nb import nitransforms as nt + import numpy as np from scipy.ndimage.filters import gaussian_filter img = nb.load(in_file) diff --git a/fmriprep/workflows/bold/resampling.py b/fmriprep/workflows/bold/resampling.py index 563655bf8..fe710c8be 100644 --- a/fmriprep/workflows/bold/resampling.py +++ b/fmriprep/workflows/bold/resampling.py @@ -29,11 +29,12 @@ .. autofunction:: init_bold_preproc_trans_wf """ -from ...config import DEFAULT_MEMORY_MIN_GB - -from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu, freesurfer as fs import nipype.interfaces.workbench as wb +from nipype.interfaces import freesurfer as fs +from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe + +from ...config import DEFAULT_MEMORY_MIN_GB def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_surf_wf"): @@ -105,9 +106,7 @@ def init_bold_surf_wf(mem_gb, surface_spaces, medial_surface_nan, name="bold_sur itersource = pe.Node(niu.IdentityInterface(fields=["target"]), name="itersource") itersource.iterables = [("target", surface_spaces)] - get_fsnative = pe.Node( - FreeSurferSource(), name="get_fsnative", run_without_submitting=True - ) + get_fsnative = pe.Node(FreeSurferSource(), name="get_fsnative", run_without_submitting=True) def select_target(subject_id, space): """Get the target subject ID, given a source subject ID and a target space.""" @@ -127,9 +126,7 @@ def select_target(subject_id, space): run_without_submitting=True, mem_gb=DEFAULT_MEMORY_MIN_GB, ) - itk2lta = pe.Node( - niu.Function(function=_itk2lta), name="itk2lta", run_without_submitting=True - ) + itk2lta = pe.Node(niu.Function(function=_itk2lta), name="itk2lta", run_without_submitting=True) sampler = pe.MapNode( fs.SampleToSurface( cortex_mask=True, @@ -313,16 +310,17 @@ def init_bold_std_trans_wf( described outputs. 
""" - from fmriprep.interfaces.maths import Clip from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.func.util import init_bold_reference_wf from niworkflows.interfaces.fixes import FixHeaderApplyTransforms as ApplyTransforms from niworkflows.interfaces.itk import MultiApplyTransforms - from niworkflows.interfaces.utility import KeySelect from niworkflows.interfaces.nibabel import GenerateSamplingReference from niworkflows.interfaces.nilearn import Merge + from niworkflows.interfaces.utility import KeySelect from niworkflows.utils.spaces import format_reference + from fmriprep.interfaces.maths import Clip + workflow = Workflow(name=name) output_references = spaces.cached.get_spaces(nonstandard=False, dim=(3,)) std_vol_references = [ @@ -364,9 +362,7 @@ def init_bold_std_trans_wf( name="inputnode", ) - iterablesource = pe.Node( - niu.IdentityInterface(fields=["std_target"]), name="iterablesource" - ) + iterablesource = pe.Node(niu.IdentityInterface(fields=["std_target"]), name="iterablesource") # Generate conversions for every template+spec at the input iterablesource.iterables = [("std_target", std_vol_references)] @@ -416,9 +412,7 @@ def init_bold_std_trans_wf( ) bold_to_std_transform = pe.Node( - MultiApplyTransforms( - interpolation="LanczosWindowedSinc", float=True, copy_dtype=True - ), + MultiApplyTransforms(interpolation="LanczosWindowedSinc", float=True, copy_dtype=True), name="bold_to_std_transform", mem_gb=mem_gb * 3 * omp_nthreads, n_procs=omp_nthreads, @@ -426,10 +420,8 @@ def init_bold_std_trans_wf( # Interpolation can occasionally produce below-zero values as an artifact threshold = pe.MapNode( - Clip(minimum=0), - name="threshold", - iterfield=['in_file'], - mem_gb=DEFAULT_MEMORY_MIN_GB) + Clip(minimum=0), name="threshold", iterfield=['in_file'], mem_gb=DEFAULT_MEMORY_MIN_GB + ) merge = pe.Node(Merge(compress=use_compression), name="merge", mem_gb=mem_gb * 3) @@ -477,9 +469,7 @@ def init_bold_std_trans_wf( if multiecho: output_names.append("t2star_std") - poutputnode = pe.Node( - niu.IdentityInterface(fields=output_names), name="poutputnode" - ) + poutputnode = pe.Node(niu.IdentityInterface(fields=output_names), name="poutputnode") # fmt:off workflow.connect([ # Connecting outputnode @@ -516,7 +506,8 @@ def init_bold_std_trans_wf( if multiecho: t2star_std_tfm = pe.Node( ApplyTransforms(interpolation="LanczosWindowedSinc", float=True), - name="t2star_std_tfm", mem_gb=1 + name="t2star_std_tfm", + mem_gb=1, ) # fmt:off workflow.connect([ @@ -597,11 +588,12 @@ def init_bold_preproc_trans_wf( BOLD series, resampled in native space, including all preprocessing """ - from fmriprep.interfaces.maths import Clip from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.itk import MultiApplyTransforms from niworkflows.interfaces.nilearn import Merge + from fmriprep.interfaces.maths import Clip + workflow = Workflow(name=name) workflow.__desc__ = """\ The BOLD time-series (including slice-timing correction when applied) @@ -619,9 +611,7 @@ def init_bold_preproc_trans_wf( ) inputnode = pe.Node( - niu.IdentityInterface( - fields=["name_source", "bold_file", "hmc_xforms", "fieldwarp"] - ), + niu.IdentityInterface(fields=["name_source", "bold_file", "hmc_xforms", "fieldwarp"]), name="inputnode", ) @@ -646,10 +636,8 @@ def init_bold_preproc_trans_wf( # Interpolation can occasionally produce below-zero values as an artifact threshold = pe.MapNode( - Clip(minimum=0), - name="threshold", - iterfield=['in_file'], - 
mem_gb=DEFAULT_MEMORY_MIN_GB) + Clip(minimum=0), name="threshold", iterfield=['in_file'], mem_gb=DEFAULT_MEMORY_MIN_GB + ) merge = pe.Node(Merge(compress=use_compression), name="merge", mem_gb=mem_gb * 3) @@ -669,9 +657,7 @@ def init_bold_preproc_trans_wf( return workflow -def init_bold_grayords_wf( - grayord_density, mem_gb, repetition_time, name="bold_grayords_wf" -): +def init_bold_grayords_wf(grayord_density, mem_gb, repetition_time, name="bold_grayords_wf"): """ Sample Grayordinates files onto the fsLR atlas. @@ -733,9 +719,7 @@ def init_bold_grayords_wf( density=grayord_density ) - fslr_density, mni_density = ( - ("32k", "2") if grayord_density == "91k" else ("59k", "1") - ) + fslr_density, mni_density = ("32k", "2") if grayord_density == "91k" else ("59k", "1") inputnode = pe.Node( niu.IdentityInterface( @@ -924,9 +908,10 @@ def _is_native(in_value): def _itk2lta(in_file, src_file, dst_file): - import nitransforms as nt from pathlib import Path + import nitransforms as nt + out_file = Path("out.lta").absolute() nt.linear.load( in_file, fmt="fs" if in_file.endswith(".lta") else "itk", reference=src_file diff --git a/fmriprep/workflows/bold/stc.py b/fmriprep/workflows/bold/stc.py index 6cd13ad8c..877c73296 100644 --- a/fmriprep/workflows/bold/stc.py +++ b/fmriprep/workflows/bold/stc.py @@ -27,15 +27,15 @@ .. autofunction:: init_bold_stc_wf """ -import numpy as np import nibabel as nb -from nipype.pipeline import engine as pe -from nipype.interfaces import utility as niu, afni +import numpy as np +from nipype.interfaces import afni +from nipype.interfaces import utility as niu from nipype.interfaces.base import isdefined +from nipype.pipeline import engine as pe from ... import config - LOGGER = config.loggers.workflow @@ -112,15 +112,18 @@ def init_bold_stc_wf(metadata, name='bold_stc_wf'): # It would be good to fingerprint memory use of afni.TShift slice_timing_correction = pe.Node( - TShift(outputtype='NIFTI_GZ', - tr=f"{metadata['RepetitionTime']}s", - slice_timing=metadata['SliceTiming'], - slice_encoding_direction=metadata.get('SliceEncodingDirection', 'k'), - tzero=tzero), - name='slice_timing_correction') + TShift( + outputtype='NIFTI_GZ', + tr=f"{metadata['RepetitionTime']}s", + slice_timing=metadata['SliceTiming'], + slice_encoding_direction=metadata.get('SliceEncodingDirection', 'k'), + tzero=tzero, + ), + name='slice_timing_correction', + ) copy_xform = pe.Node(CopyXForm(), name='copy_xform', mem_gb=0.1) - + # fmt:off workflow.connect([ (inputnode, slice_timing_correction, [('bold_file', 'in_file'), ('skip_vols', 'ignore')]), @@ -128,5 +131,6 @@ def init_bold_stc_wf(metadata, name='bold_stc_wf'): (inputnode, copy_xform, [('bold_file', 'hdr_file')]), (copy_xform, outputnode, [('out_file', 'stc_file')]), ]) + # fmt:on return workflow diff --git a/fmriprep/workflows/bold/t2s.py b/fmriprep/workflows/bold/t2s.py index 8ddacabcb..951046c5a 100644 --- a/fmriprep/workflows/bold/t2s.py +++ b/fmriprep/workflows/bold/t2s.py @@ -27,21 +27,19 @@ .. autofunction:: init_bold_t2s_wf """ -from nipype.pipeline import engine as pe from nipype.interfaces import utility as niu +from nipype.pipeline import engine as pe +from ... import config +from ...interfaces.maths import Clip, Label2Mask from ...interfaces.multiecho import T2SMap -from ...interfaces.maths import Label2Mask, Clip from ...interfaces.reports import LabeledHistogram -from ... 
import config - LOGGER = config.loggers.workflow # pylint: disable=R0914 -def init_bold_t2s_wf(echo_times, mem_gb, omp_nthreads, - name='bold_t2s_wf'): +def init_bold_t2s_wf(echo_times, mem_gb, omp_nthreads, name='bold_t2s_wf'): r""" Combine multiple echos of :abbr:`ME-EPI (multi-echo echo-planar imaging)`. @@ -101,13 +99,14 @@ def init_bold_t2s_wf(echo_times, mem_gb, omp_nthreads, LOGGER.log(25, 'Generating T2* map and optimally combined ME-EPI time series.') t2smap_node = pe.Node(T2SMap(echo_times=list(echo_times)), name='t2smap_node') - + # fmt:off workflow.connect([ (inputnode, t2smap_node, [('bold_file', 'in_files'), ('bold_mask', 'mask_file')]), (t2smap_node, outputnode, [('optimal_comb', 'bold'), ('t2star_map', 't2star_map')]), ]) + # fmt:on return workflow @@ -156,14 +155,13 @@ def init_t2s_reporting_wf(name='t2s_reporting_wf'): workflow = pe.Workflow(name=name) inputnode = pe.Node( - niu.IdentityInterface( - fields=['t2star_file', 'boldref', 'label_file', 'label_bold_xform'] - ), - name='inputnode') + niu.IdentityInterface(fields=['t2star_file', 'boldref', 'label_file', 'label_bold_xform']), + name='inputnode', + ) outputnode = pe.Node( - niu.IdentityInterface(fields=['t2star_hist', 't2s_comp_report']), - name='outputnode') + niu.IdentityInterface(fields=['t2star_hist', 't2s_comp_report']), name='outputnode' + ) label_tfm = pe.Node(ApplyTransforms(interpolation="MultiLabel"), name="label_tfm") @@ -171,8 +169,9 @@ def init_t2s_reporting_wf(name='t2s_reporting_wf'): clip_t2star = pe.Node(Clip(maximum=0.1), name="clip_t2star") - t2s_hist = pe.Node(LabeledHistogram(mapping={1: "Gray matter"}, xlabel='T2* (s)'), - name='t2s_hist') + t2s_hist = pe.Node( + LabeledHistogram(mapping={1: "Gray matter"}, xlabel='T2* (s)'), name='t2s_hist' + ) t2s_comparison = pe.Node( SimpleBeforeAfter( @@ -183,7 +182,7 @@ def init_t2s_reporting_wf(name='t2s_reporting_wf'): name="t2s_comparison", mem_gb=0.1, ) - + # fmt:off workflow.connect([ (inputnode, label_tfm, [('label_file', 'input_image'), ('t2star_file', 'reference_image'), @@ -198,5 +197,5 @@ def init_t2s_reporting_wf(name='t2s_reporting_wf'): (t2s_hist, outputnode, [('out_report', 't2star_hist')]), (t2s_comparison, outputnode, [('out_report', 't2s_comp_report')]), ]) - + # fmt:on return workflow diff --git a/fmriprep/workflows/bold/tests/test_confounds.py b/fmriprep/workflows/bold/tests/test_confounds.py index cacf79d3c..2947e253a 100644 --- a/fmriprep/workflows/bold/tests/test_confounds.py +++ b/fmriprep/workflows/bold/tests/test_confounds.py @@ -21,24 +21,24 @@ # https://www.nipreps.org/community/licensing/ # ''' Testing module for fmriprep.workflows.bold.confounds ''' -import pytest import os + import nibabel as nib +import pytest from ..confounds import _add_volumes, _remove_volumes - skip_pytest = pytest.mark.skipif( - not os.getenv('FMRIPREP_REGRESSION_SOURCE') - or not os.getenv('FMRIPREP_REGRESSION_TARGETS'), - reason='FMRIPREP_REGRESSION_{SOURCE,TARGETS} env vars not set' + not os.getenv('FMRIPREP_REGRESSION_SOURCE') or not os.getenv('FMRIPREP_REGRESSION_TARGETS'), + reason='FMRIPREP_REGRESSION_{SOURCE,TARGETS} env vars not set', ) @skip_pytest def test_remove_volumes(): - bold_file = os.path.join(os.getenv('FMRIPREP_REGRESSION_SOURCE'), - 'ds001362/sub-01_task-taskname_run-01_bold.nii.gz') + bold_file = os.path.join( + os.getenv('FMRIPREP_REGRESSION_SOURCE'), 'ds001362/sub-01_task-taskname_run-01_bold.nii.gz' + ) n_volumes = nib.load(bold_file).shape[3] skip_vols = 3 @@ -54,8 +54,9 @@ def test_remove_volumes(): @skip_pytest def 
test_add_volumes(): - bold_file = os.path.join(os.getenv('FMRIPREP_REGRESSION_SOURCE'), - 'ds001362/sub-01_task-taskname_run-01_bold.nii.gz') + bold_file = os.path.join( + os.getenv('FMRIPREP_REGRESSION_SOURCE'), 'ds001362/sub-01_task-taskname_run-01_bold.nii.gz' + ) n_volumes = nib.load(bold_file).shape[3] add_vols = 3 diff --git a/fmriprep/workflows/tests.py b/fmriprep/workflows/tests.py index 1b4586581..b79cd07e4 100644 --- a/fmriprep/workflows/tests.py +++ b/fmriprep/workflows/tests.py @@ -24,15 +24,17 @@ import os from contextlib import contextmanager from pathlib import Path +from tempfile import mkdtemp + from pkg_resources import resource_filename as pkgrf from toml import loads -from tempfile import mkdtemp @contextmanager def mock_config(): """Create a mock config for documentation and testing purposes.""" from .. import config + _old_fs = os.getenv('FREESURFER_HOME') if not _old_fs: os.environ['FREESURFER_HOME'] = mkdtemp() diff --git a/pyproject.toml b/pyproject.toml index b513b1e47..3f8f19704 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,6 +4,10 @@ requires = ["setuptools >= 40.8.0", "wheel"] [tool.black] line-length = 99 -target-version = ['py37'] +target-version = ['py39'] skip-string-normalization = true extend-exclude = '_version.py|versioneer.py' + +[tool.isort] +profile = 'black' +extend_skip = '_version.py' diff --git a/setup.cfg b/setup.cfg index 424050e36..6dd5f53ff 100644 --- a/setup.cfg +++ b/setup.cfg @@ -56,6 +56,10 @@ doc = sphinx-argparse sphinx_rtd_theme sphinxcontrib-napoleon +dev = + black ~= 22.3.0 + pre-commit + isort ~= 5.10.1 docs = %(doc)s duecredit = duecredit @@ -112,6 +116,7 @@ doctests = False exclude=*build/ ignore = W503 + E203 per-file-ignores = **/__init__.py : F401 docs/conf.py : E265 diff --git a/wrapper/fmriprep_docker.py b/wrapper/fmriprep_docker.py index 1e2ddd220..8a51cb12b 100755 --- a/wrapper/fmriprep_docker.py +++ b/wrapper/fmriprep_docker.py @@ -15,15 +15,22 @@ `NiPreps documentation `__. Please report any feedback to our `GitHub repository `__. """ -import sys import os import re import subprocess +import sys __version__ = '99.99.99' __copyright__ = 'Copyright 2020, Center for Reproducible Neuroscience, Stanford University' -__credits__ = ['Craig Moodie', 'Ross Blair', 'Oscar Esteban', 'Chris Gorgolewski', - 'Shoshana Berleant', 'Christopher J. Markiewicz', 'Russell A. Poldrack'] +__credits__ = [ + 'Craig Moodie', + 'Ross Blair', + 'Oscar Esteban', + 'Chris Gorgolewski', + 'Shoshana Berleant', + 'Christopher J. Markiewicz', + 'Russell A. 
Poldrack', +] __bugreports__ = 'https://github.com/nipreps/fmriprep/issues' @@ -47,14 +54,7 @@ 'fsaverage5', 'fsaverage6', ) -NONSTANDARD_REFERENCES = ( - 'anat', - 'T1w', - 'run', - 'func', - 'sbref', - 'fsnative' -) +NONSTANDARD_REFERENCES = ('anat', 'T1w', 'run', 'func', 'sbref', 'fsnative') # Monkey-patch Py2 subprocess if not hasattr(subprocess, 'DEVNULL'): @@ -64,6 +64,7 @@ # Reimplement minimal functionality for usage in this file def _run(args, stdout=None, stderr=None): from collections import namedtuple + result = namedtuple('CompletedProcess', 'stdout stderr returncode') devnull = None @@ -82,6 +83,7 @@ def _run(args, stdout=None, stderr=None): devnull.close() return res + subprocess.run = _run @@ -101,12 +103,12 @@ def check_docker(): -1 Docker can't be found 0 Docker found, but user can't connect to daemon 1 Test run OK - """ + """ try: - ret = subprocess.run(['docker', 'version'], stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + ret = subprocess.run(['docker', 'version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) except OSError as e: from errno import ENOENT + if e.errno == ENOENT: return -1 raise e @@ -117,22 +119,21 @@ def check_docker(): def check_image(image): """Check whether image is present on local system""" - ret = subprocess.run(['docker', 'images', '-q', image], - stdout=subprocess.PIPE) + ret = subprocess.run(['docker', 'images', '-q', image], stdout=subprocess.PIPE) return bool(ret.stdout) def check_memory(image): """Check total memory from within a docker container""" - ret = subprocess.run(['docker', 'run', '--rm', '--entrypoint=free', - image, '-m'], - stdout=subprocess.PIPE) + ret = subprocess.run( + ['docker', 'run', '--rm', '--entrypoint=free', image, '-m'], stdout=subprocess.PIPE + ) if ret.returncode: return -1 - mem = [line.decode().split()[1] - for line in ret.stdout.splitlines() - if line.startswith(b'Mem:')][0] + mem = [ + line.decode().split()[1] for line in ret.stdout.splitlines() if line.startswith(b'Mem:') + ][0] return int(mem) @@ -199,18 +200,22 @@ def _get_posargs(usage): } assert overlap == expected_overlap, "Clobbering options: {}".format( - ', '.join(overlap - expected_overlap)) + ', '.join(overlap - expected_overlap) + ) sections = [] # Construct usage - start = w_usage[:w_usage.index(' [')] + start = w_usage[: w_usage.index(' [')] indent = ' ' * len(start) - new_options = sum(( - w_options[:2], - [opt for opt, flag in zip(t_options, t_flags) if flag not in overlap], - w_options[2:] - ), []) + new_options = sum( + ( + w_options[:2], + [opt for opt, flag in zip(t_options, t_flags) if flag not in overlap], + w_options[2:], + ), + [], + ) opt_line_length = 79 - len(start) length = 0 opt_lines = [start] @@ -242,8 +247,7 @@ def _get_posargs(usage): def is_in_directory(filepath, directory): - return os.path.realpath(filepath).startswith( - os.path.realpath(directory) + os.sep) + return os.path.realpath(filepath).startswith(os.path.realpath(directory) + os.sep) def get_parser(): @@ -263,45 +267,54 @@ def _is_file(path, parser): """Ensure a given path exists and it is a file.""" path = os.path.abspath(path) if not os.path.isfile(path): - raise parser.error( - "Path should point to a file (or symlink of file): <%s>." % path - ) + raise parser.error("Path should point to a file (or symlink of file): <%s>." 
@@ -263,45 +267,54 @@ def _is_file(path, parser):
         """Ensure a given path exists and it is a file."""
         path = os.path.abspath(path)
         if not os.path.isfile(path):
-            raise parser.error(
-                "Path should point to a file (or symlink of file): <%s>." % path
-            )
+            raise parser.error("Path should point to a file (or symlink of file): <%s>." % path)
         return path

     parser = argparse.ArgumentParser(
-        description=__doc__,
-        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
-        add_help=False)
+        description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter, add_help=False
+    )
     IsFile = partial(_is_file, parser=parser)

     # Standard FMRIPREP arguments
-    parser.add_argument('bids_dir', nargs='?', type=os.path.abspath,
-                        default='')
-    parser.add_argument('output_dir', nargs='?', type=os.path.abspath,
-                        default='')
-    parser.add_argument('analysis_level', nargs='?', choices=['participant'],
-                        default='participant')
-
-    parser.add_argument('-h', '--help', action='store_true',
-                        help="show this help message and exit")
-    parser.add_argument('--version', action='store_true',
-                        help="show program's version number and exit")
+    parser.add_argument('bids_dir', nargs='?', type=os.path.abspath, default='')
+    parser.add_argument('output_dir', nargs='?', type=os.path.abspath, default='')
+    parser.add_argument(
+        'analysis_level', nargs='?', choices=['participant'], default='participant'
+    )
+
+    parser.add_argument(
+        '-h', '--help', action='store_true', help="show this help message and exit"
+    )
+    parser.add_argument(
+        '--version', action='store_true', help="show program's version number and exit"
+    )

     # Allow alternative images (semi-developer)
-    parser.add_argument('-i', '--image', metavar='IMG', type=str,
-                        default='nipreps/fmriprep:{}'.format(__version__),
-                        help='image name')
+    parser.add_argument(
+        '-i',
+        '--image',
+        metavar='IMG',
+        type=str,
+        default='nipreps/fmriprep:{}'.format(__version__),
+        help='image name',
+    )

     # Options for mapping files and directories into container
     # Update `expected_overlap` variable in merge_help() when adding to this
     g_wrap = parser.add_argument_group(
-        'Wrapper options',
-        'Standard options that require mapping files into the container')
-    g_wrap.add_argument('-w', '--work-dir', action='store', type=os.path.abspath,
-                        help='path where intermediate results should be stored')
+        'Wrapper options', 'Standard options that require mapping files into the container'
+    )
+    g_wrap.add_argument(
+        '-w',
+        '--work-dir',
+        action='store',
+        type=os.path.abspath,
+        help='path where intermediate results should be stored',
+    )
     g_wrap.add_argument(
-        '--output-spaces', nargs="*",
+        '--output-spaces',
+        nargs="*",
         help="""\
 Standard and non-standard spaces to resample anatomical and functional images to. \
 Standard spaces may be specified by the form \
@@ -311,60 +324,107 @@ def _is_file(path, parser):
 Non-standard spaces (valid keywords: %s) imply specific orientations and sampling \
 grids. \
 Important to note, the ``res-*`` modifier does not define the resolution used for \
-the spatial normalization.""" % (', '.join('"%s"' % s for s in TF_TEMPLATES),
-                                 ', '.join(NONSTANDARD_REFERENCES)))
+the spatial normalization."""
+        % (', '.join('"%s"' % s for s in TF_TEMPLATES), ', '.join(NONSTANDARD_REFERENCES)),
+    )
     g_wrap.add_argument(
-        '--fs-license-file', metavar='PATH', type=IsFile,
+        '--fs-license-file',
+        metavar='PATH',
+        type=IsFile,
         default=os.getenv('FS_LICENSE', None),
         help='Path to FreeSurfer license key file. Get it (for free) by registering'
-        ' at https://surfer.nmr.mgh.harvard.edu/registration.html')
+        ' at https://surfer.nmr.mgh.harvard.edu/registration.html',
+    )
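The pattern running through this hunk is black's standard treatment of long calls: a call that fits within the configured 99 columns stays on one line, while a longer one is exploded to one argument per line, and the trailing comma after the final argument (black's "magic trailing comma") pins the exploded layout on future runs. A schematic before/after, with hypothetical option names:

    import argparse
    import os

    parser = argparse.ArgumentParser()

    # Fits in 99 columns: black keeps the call on a single line.
    parser.add_argument('--quick', action='store_true', help='short help')

    # Exceeds the limit: black explodes it, one argument per line; the
    # final trailing comma keeps it exploded even if it later shrinks.
    parser.add_argument(
        '--hypothetical-long-option',
        metavar='PATH',
        type=os.path.abspath,
        help='help text long enough that the collapsed form of this call would pass 99 columns',
    )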
     g_wrap.add_argument(
-        '--fs-subjects-dir', metavar='PATH', type=os.path.abspath,
+        '--fs-subjects-dir',
+        metavar='PATH',
+        type=os.path.abspath,
         help='Path to existing FreeSurfer subjects directory to reuse. '
-        '(default: OUTPUT_DIR/freesurfer)')
+        '(default: OUTPUT_DIR/freesurfer)',
+    )
     g_wrap.add_argument(
-        '--config-file', metavar='PATH', type=os.path.abspath,
+        '--config-file',
+        metavar='PATH',
+        type=os.path.abspath,
         help="Use pre-generated configuration file. Values in file will be overridden "
-        "by command-line arguments.")
+        "by command-line arguments.",
+    )
     g_wrap.add_argument(
-        '--anat-derivatives', metavar='PATH', type=os.path.abspath,
+        '--anat-derivatives',
+        metavar='PATH',
+        type=os.path.abspath,
         help='Path to existing sMRIPrep/fMRIPrep-anatomical derivatives to fasttrack '
-        'the anatomical workflow.')
+        'the anatomical workflow.',
+    )
     g_wrap.add_argument(
-        '--use-plugin', metavar='PATH', action='store', default=None,
-        type=os.path.abspath, help='nipype plugin configuration file')
+        '--use-plugin',
+        metavar='PATH',
+        action='store',
+        default=None,
+        type=os.path.abspath,
+        help='nipype plugin configuration file',
+    )
     g_wrap.add_argument(
-        '--bids-database-dir', metavar='PATH', type=os.path.abspath,
+        '--bids-database-dir',
+        metavar='PATH',
+        type=os.path.abspath,
         help="Path to an existing PyBIDS database folder, for faster indexing "
-        "(especially useful for large datasets).")
+        "(especially useful for large datasets).",
+    )
     g_wrap.add_argument(
-        '--bids-filter-file', metavar='PATH', type=os.path.abspath,
+        '--bids-filter-file',
+        metavar='PATH',
+        type=os.path.abspath,
         help="a JSON file describing custom BIDS input filters using PyBIDS. "
         "For further details, please check out "
         "https://fmriprep.readthedocs.io/en/latest/faq.html#"
-        "how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep")
+        "how-do-I-select-only-certain-files-to-be-input-to-fMRIPrep",
+    )

     # Developer patch/shell options
     g_dev = parser.add_argument_group(
-        'Developer options',
-        'Tools for testing and debugging FMRIPREP')
-    g_dev.add_argument('--patch', nargs="+", metavar="PACKAGE=PATH", action=ToDict,
-                       help='local repository to use within container')
-    g_dev.add_argument('--shell', action='store_true',
-                       help='open shell in image instead of running FMRIPREP')
-    g_dev.add_argument('--config', metavar='PATH', action='store',
-                       type=os.path.abspath, help='Use custom nipype.cfg file')
-    g_dev.add_argument('-e', '--env', action='append', nargs=2, metavar=('ENV_VAR', 'value'),
-                       help='Set custom environment variable within container')
-    g_dev.add_argument('-u', '--user', action='store',
-                       help='Run container as a given user/uid. Additionally, group/gid can be'
-                       'assigned, (i.e., --user <uid>:<gid>)')
-    g_dev.add_argument('--network', action='store',
-                       help='Run container with a different network driver '
-                       '("none" to simulate no internet connection)')
-    g_dev.add_argument('--no-tty', action='store_true',
-                       help='Run docker without TTY flag -it')
+        'Developer options', 'Tools for testing and debugging FMRIPREP'
+    )
+    g_dev.add_argument(
+        '--patch',
+        nargs="+",
+        metavar="PACKAGE=PATH",
+        action=ToDict,
+        help='local repository to use within container',
+    )
+    g_dev.add_argument(
+        '--shell', action='store_true', help='open shell in image instead of running FMRIPREP'
+    )
+    g_dev.add_argument(
+        '--config',
+        metavar='PATH',
+        action='store',
+        type=os.path.abspath,
+        help='Use custom nipype.cfg file',
+    )
+    g_dev.add_argument(
+        '-e',
+        '--env',
+        action='append',
+        nargs=2,
+        metavar=('ENV_VAR', 'value'),
+        help='Set custom environment variable within container',
+    )
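One more aside before the remaining developer options: `--patch PACKAGE=PATH`, defined above, pairs with the bind-mount loop later in `main()`, which shadows each installed package inside the image with a read-only mount of the local repository. A sketch of the docker arguments it produces; the `PKG_PATH` value here is a hypothetical stand-in for the image's site-packages path:

    PKG_PATH = '/opt/conda/lib/python3.9/site-packages'  # hypothetical

    patch = {'fmriprep': '/home/user/src/fmriprep/fmriprep'}
    command = ['docker', 'run', '--rm']
    for pkg, repo_path in patch.items():
        # Read-only volume shadowing the installed package in the image
        command.extend(['-v', '{}:{}/{}:ro'.format(repo_path, PKG_PATH, pkg)])
    print(' '.join(command))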
+    g_dev.add_argument(
+        '-u',
+        '--user',
+        action='store',
+        help='Run container as a given user/uid. Additionally, group/gid can be'
+        'assigned, (i.e., --user <uid>:<gid>)',
+    )
+    g_dev.add_argument(
+        '--network',
+        action='store',
+        help='Run container with a different network driver '
+        '("none" to simulate no internet connection)',
+    )
+    g_dev.add_argument('--no-tty', action='store_true', help='Run docker without TTY flag -it')

     return parser
@@ -413,12 +473,16 @@ def main():
     # Warn on low memory allocation
     mem_total = check_memory(opts.image)
     if mem_total == -1:
-        print('Could not detect memory capacity of Docker container.\n'
-              'Do you have permission to run docker?')
+        print(
+            'Could not detect memory capacity of Docker container.\n'
+            'Do you have permission to run docker?'
+        )
         return 1
     if not (opts.help or opts.version or '--reports-only' in unknown_args) and mem_total < 8000:
-        print('Warning: <8GB of RAM is available within your Docker '
-              'environment.\nSome parts of fMRIPrep may fail to complete.')
+        print(
+            'Warning: <8GB of RAM is available within your Docker '
+            'environment.\nSome parts of fMRIPrep may fail to complete.'
+        )
         if '--mem_mb' not in unknown_args:
             resp = 'N'
             try:
@@ -429,12 +493,12 @@ def main():
             if resp not in ('y', 'Y', ''):
                 return 0

-    ret = subprocess.run(['docker', 'version', '--format', "{{.Server.Version}}"],
-                         stdout=subprocess.PIPE)
+    ret = subprocess.run(
+        ['docker', 'version', '--format', "{{.Server.Version}}"], stdout=subprocess.PIPE
+    )
     docker_version = ret.stdout.decode('ascii').strip()

-    command = ['docker', 'run', '--rm', '-e',
-               'DOCKER_VERSION_8395080871=%s' % docker_version]
+    command = ['docker', 'run', '--rm', '-e', 'DOCKER_VERSION_8395080871=%s' % docker_version]

     if not opts.no_tty:
         if opts.help:
@@ -446,9 +510,7 @@ def main():
     # Patch working repositories into installed package directories
     if opts.patch:
         for pkg, repo_path in opts.patch.items():
-            command.extend(
-                ['-v', '{}:{}/{}:ro'.format(repo_path, PKG_PATH, pkg)]
-            )
+            command.extend(['-v', '{}:{}/{}:ro'.format(repo_path, PKG_PATH, pkg)])

     if opts.env:
         for envvar in opts.env:
@@ -458,9 +520,7 @@ def main():
         command.extend(['-u', opts.user])

     if opts.fs_license_file:
-        command.extend([
-            '-v', '{}:/opt/freesurfer/license.txt:ro'.format(
-                opts.fs_license_file)])
+        command.extend(['-v', '{}:/opt/freesurfer/license.txt:ro'.format(opts.fs_license_file)])

     main_args = []
     if opts.bids_dir:
@@ -495,16 +555,15 @@ def main():
         if is_in_directory(opts.work_dir, opts.bids_dir):
             print(
                 'The selected working directory is a subdirectory of the input BIDS folder. '
-                'Please modify the output path.')
+                'Please modify the output path.'
+            )
             return 1

     if opts.config:
-        command.extend(['-v', ':'.join((
-            opts.config, '/home/fmriprep/.nipype/nipype.cfg', 'ro'))])
+        command.extend(['-v', ':'.join((opts.config, '/home/fmriprep/.nipype/nipype.cfg', 'ro'))])

     if opts.use_plugin:
-        command.extend(['-v', ':'.join((opts.use_plugin, '/tmp/plugin.yml',
-                                        'ro'))])
+        command.extend(['-v', ':'.join((opts.use_plugin, '/tmp/plugin.yml', 'ro'))])
         unknown_args.extend(['--use-plugin', '/tmp/plugin.yml'])

     if opts.bids_database_dir: