From 6a792eddd974fa03f8fc90bc89df609378febb09 Mon Sep 17 00:00:00 2001
From: Oscar Esteban
Date: Tue, 19 Mar 2024 21:46:35 +0100
Subject: [PATCH] sty: apply unsafe ruff fixes

---
 .maint/update_authors.py                 | 155 ++++++++++++-----------
 docs/source/conf.py                      |  99 ++++++++-------
 mriqc/bin/abide2bids.py                  |  14 +-
 mriqc/bin/fs2gif.py                      |  10 +-
 mriqc/bin/mriqcwebapi_test.py            |   5 +-
 mriqc/bin/subject_wrangler.py            |   2 +-
 mriqc/cli/parser.py                      |  15 ++-
 mriqc/cli/run.py                         |  10 +-
 mriqc/cli/version.py                     |  33 +++--
 mriqc/config.py                          |  33 +++--
 mriqc/data/config.py                     |   1 +
 mriqc/engine/plugin.py                   |  16 ++-
 mriqc/instrumentation/resources.py       |  14 +-
 mriqc/instrumentation/viz.py             |   2 +-
 mriqc/interfaces/anatomical.py           |   2 +-
 mriqc/interfaces/common/__init__.py      |  15 +--
 mriqc/interfaces/common/conform_image.py |  53 ++++++--
 mriqc/interfaces/data_types.py           |  69 ----------
 mriqc/interfaces/datalad.py              |   4 +-
 mriqc/interfaces/webapi.py               |   5 +-
 mriqc/qc/tests/test_anatomical.py        |   3 +-
 mriqc/reports/group.py                   |   4 +-
 mriqc/synthstrip/model.py                |   2 +-
 mriqc/tests/test_config.py               |   4 +-
 mriqc/tests/test_reports.py              |   2 +-
 mriqc/utils/debug.py                     |   6 +-
 mriqc/workflows/anatomical/base.py       |  10 +-
 mriqc/workflows/diffusion/base.py        |   4 +-
 pyproject.toml                           |   9 +-
 29 files changed, 297 insertions(+), 304 deletions(-)
 mode change 100644 => 100755 .maint/update_authors.py
 delete mode 100644 mriqc/interfaces/data_types.py

diff --git a/.maint/update_authors.py b/.maint/update_authors.py
old mode 100644
new mode 100755
index 3c7b73ac1..c16dca814
--- a/.maint/update_authors.py
+++ b/.maint/update_authors.py
@@ -1,8 +1,9 @@
 #!/usr/bin/env python3
 """Update and sort the creators list of the zenodo record."""
+import json
 import sys
 from pathlib import Path
-import json
+
 
 import click
 from fuzzywuzzy import fuzz, process
@@ -35,10 +36,10 @@ def read_md_table(md_text):
     keys = None
     retval = []
     for line in md_text.splitlines():
-        if line.strip().startswith("| --- |"):
+        if line.strip().startswith('| --- |'):
             keys = (
-                k.replace("*", "").strip()
-                for k in prev.split("|")
+                k.replace('*', '').strip()
+                for k in prev.split('|')
             )
             keys = [k.lower() for k in keys if k]
             continue
@@ -46,10 +47,10 @@ def read_md_table(md_text):
             prev = line
             continue
 
-        if not line or not line.strip().startswith("|"):
+        if not line or not line.strip().startswith('|'):
             break
 
-        values = [v.strip() or None for v in line.split("|")][1:-1]
+        values = [v.strip() or None for v in line.split('|')][1:-1]
         retval.append({k: v for k, v in zip(keys, values) if v})
 
     return retval
@@ -58,13 +59,13 @@ def sort_contributors(entries, git_lines, exclude=None, last=None):
     """Return a list of author dictionaries, ordered by contribution."""
     last = last or []
-    sorted_authors = sorted(entries, key=lambda i: i["name"])
+    sorted_authors = sorted(entries, key=lambda i: i['name'])
 
     first_last = [
-        " ".join(val["name"].split(",")[::-1]).strip() for val in sorted_authors
+        ' '.join(val['name'].split(',')[::-1]).strip() for val in sorted_authors
     ]
 
     first_last_excl = [
-        " ".join(val["name"].split(",")[::-1]).strip() for val in exclude or []
+        ' '.join(val['name'].split(',')[::-1]).strip() for val in exclude or []
     ]
 
     unmatched = []
@@ -85,7 +86,7 @@ def sort_contributors(entries, git_lines, exclude=None, last=None):
         if val not in author_matches:
             author_matches.append(val)
 
-    names = {" ".join(val["name"].split(",")[::-1]).strip() for val in author_matches}
+    names = {' '.join(val['name'].split(',')[::-1]).strip() for val in author_matches}
     for missing_name in first_last:
         if missing_name not in
names: missing = sorted_authors[first_last.index(missing_name)] @@ -93,7 +94,7 @@ def sort_contributors(entries, git_lines, exclude=None, last=None): position_matches = [] for i, item in enumerate(author_matches): - pos = item.pop("position", None) + pos = item.pop('position', None) if pos is not None: position_matches.append((i, int(pos))) @@ -105,7 +106,7 @@ def sort_contributors(entries, git_lines, exclude=None, last=None): return author_matches, unmatched -def get_git_lines(fname="line-contributors.txt"): +def get_git_lines(fname='line-contributors.txt'): """Run git-line-summary.""" import shutil import subprocess as sp @@ -114,33 +115,33 @@ def get_git_lines(fname="line-contributors.txt"): lines = [] if contrib_file.exists(): - print("WARNING: Reusing existing line-contributors.txt file.", file=sys.stderr) + print('WARNING: Reusing existing line-contributors.txt file.', file=sys.stderr) lines = contrib_file.read_text().splitlines() - git_line_summary_path = shutil.which("git-line-summary") + git_line_summary_path = shutil.which('git-line-summary') if not lines and git_line_summary_path: - print("Running git-line-summary on repo") + print('Running git-line-summary on repo') lines = sp.check_output([git_line_summary_path]).decode().splitlines() - lines = [l for l in lines if "Not Committed Yet" not in l] - contrib_file.write_text("\n".join(lines)) + lines = [line for line in lines if 'Not Committed Yet' not in line] + contrib_file.write_text('\n'.join(lines)) if not lines: - raise RuntimeError( - """\ -Could not find line-contributors from git repository.%s""" - % """ \ -git-line-summary not found, please install git-extras. """ + _msg = ( + ': git-line-summary not found, please install git-extras ' * (git_line_summary_path is None) ) - return [" ".join(line.strip().split()[1:-1]) for line in lines if "%" in line] + raise RuntimeError( + f'Could not find line-contributors from git repository{_msg}.' 
+ ) + return [' '.join(line.strip().split()[1:-1]) for line in lines if '%' in line] def _namelast(inlist): retval = [] for i in inlist: - i["name"] = (f"{i.pop('name', '')} {i.pop('lastname', '')}").strip() - if not i["name"]: - i["name"] = i.get("handle", "") + i['name'] = (f"{i.pop('name', '')} {i.pop('lastname', '')}").strip() + if not i['name']: + i['name'] = i.get('handle', '') retval.append(i) return retval @@ -152,12 +153,12 @@ def cli(): @cli.command() -@click.option("-z", "--zenodo-file", type=click.Path(exists=True), default=".zenodo.json") -@click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md") -@click.option("-c", "--contributors", type=click.Path(exists=True), - default=".maint/CONTRIBUTORS.md") -@click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md") -@click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md") +@click.option('-z', '--zenodo-file', type=click.Path(exists=True), default='.zenodo.json') +@click.option('-m', '--maintainers', type=click.Path(exists=True), default='.maint/MAINTAINERS.md') +@click.option('-c', '--contributors', type=click.Path(exists=True), + default='.maint/CONTRIBUTORS.md') +@click.option('--pi', type=click.Path(exists=True), default='.maint/PIs.md') +@click.option('-f', '--former-file', type=click.Path(exists=True), default='.maint/FORMER.md') def zenodo( zenodo_file, maintainers, @@ -185,18 +186,18 @@ def zenodo( zen_pi = _namelast(reversed(read_md_table(Path(pi).read_text()))) - zenodo["creators"] = zen_creators - zenodo["contributors"] = zen_contributors + [ + zenodo['creators'] = zen_creators + zenodo['contributors'] = zen_contributors + [ pi for pi in zen_pi if pi not in zen_contributors ] creator_names = { - c["name"] for c in zenodo["creators"] - if c["name"] != "" + c['name'] for c in zenodo['creators'] + if c['name'] != '' } - zenodo["contributors"] = [ - c for c in zenodo["contributors"] - if c["name"] not in creator_names + zenodo['contributors'] = [ + c for c in zenodo['contributors'] + if c['name'] not in creator_names ] misses = set(miss_creators).intersection(miss_contributors) @@ -208,35 +209,35 @@ def zenodo( ) # Remove position - for creator in zenodo["creators"]: - creator.pop("position", None) - creator.pop("handle", None) - if "affiliation" not in creator: - creator["affiliation"] = "Unknown affiliation" - elif isinstance(creator["affiliation"], list): - creator["affiliation"] = creator["affiliation"][0] - - for creator in zenodo["contributors"]: - creator.pop("handle", None) - creator["type"] = "Researcher" - creator.pop("position", None) - - if "affiliation" not in creator: - creator["affiliation"] = "Unknown affiliation" - elif isinstance(creator["affiliation"], list): - creator["affiliation"] = creator["affiliation"][0] + for creator in zenodo['creators']: + creator.pop('position', None) + creator.pop('handle', None) + if 'affiliation' not in creator: + creator['affiliation'] = 'Unknown affiliation' + elif isinstance(creator['affiliation'], list): + creator['affiliation'] = creator['affiliation'][0] + + for creator in zenodo['contributors']: + creator.pop('handle', None) + creator['type'] = 'Researcher' + creator.pop('position', None) + + if 'affiliation' not in creator: + creator['affiliation'] = 'Unknown affiliation' + elif isinstance(creator['affiliation'], list): + creator['affiliation'] = creator['affiliation'][0] Path(zenodo_file).write_text( - "%s\n" % json.dumps(zenodo, indent=2) + '%s\n' % json.dumps(zenodo, 
indent=2) ) @cli.command() -@click.option("-m", "--maintainers", type=click.Path(exists=True), default=".maint/MAINTAINERS.md") -@click.option("-c", "--contributors", type=click.Path(exists=True), - default=".maint/CONTRIBUTORS.md") -@click.option("--pi", type=click.Path(exists=True), default=".maint/PIs.md") -@click.option("-f", "--former-file", type=click.Path(exists=True), default=".maint/FORMER.md") +@click.option('-m', '--maintainers', type=click.Path(exists=True), default='.maint/MAINTAINERS.md') +@click.option('-c', '--contributors', type=click.Path(exists=True), + default='.maint/CONTRIBUTORS.md') +@click.option('--pi', type=click.Path(exists=True), default='.maint/PIs.md') +@click.option('-f', '--former-file', type=click.Path(exists=True), default='.maint/FORMER.md') def publication( maintainers, contributors, @@ -257,10 +258,10 @@ def publication( ) pi_hits = _namelast(reversed(read_md_table(Path(pi).read_text()))) - pi_names = [pi["name"] for pi in pi_hits] + pi_names = [pi['name'] for pi in pi_hits] hits = [ hit for hit in hits - if hit["name"] not in pi_names + if hit['name'] not in pi_names ] + pi_hits def _aslist(value): @@ -271,16 +272,16 @@ def _aslist(value): # Remove position affiliations = [] for item in hits: - item.pop("position", None) - for a in _aslist(item.get("affiliation", "Unaffiliated")): + item.pop('position', None) + for a in _aslist(item.get('affiliation', 'Unaffiliated')): if a not in affiliations: affiliations.append(a) aff_indexes = [ - ", ".join( + ', '.join( [ - "%d" % (affiliations.index(a) + 1) - for a in _aslist(author.get("affiliation", "Unaffiliated")) + '%d' % (affiliations.index(a) + 1) + for a in _aslist(author.get('affiliation', 'Unaffiliated')) ] ) for author in hits @@ -293,25 +294,25 @@ def _aslist(value): file=sys.stderr, ) - print("Authors (%d):" % len(hits)) + print('Authors (%d):' % len(hits)) print( - "%s." - % "; ".join( + '%s.' + % '; '.join( [ - "%s \\ :sup:`%s`\\ " % (i["name"], idx) + '{} \\ :sup:`{}`\\ '.format(i['name'], idx) for i, idx in zip(hits, aff_indexes) ] ) ) print( - "\n\nAffiliations:\n%s" - % "\n".join( - ["{0: >2}. {1}".format(i + 1, a) for i, a in enumerate(affiliations)] + '\n\nAffiliations:\n%s' + % '\n'.join( + [f'{i + 1: >2}. {a}' for i, a in enumerate(affiliations)] ) ) -if __name__ == "__main__": +if __name__ == '__main__': """ Install entry-point """ cli() diff --git a/docs/source/conf.py b/docs/source/conf.py index 88033a488..a6a6848ae 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -12,12 +12,13 @@ serve to show the default. """ import os + from packaging.version import Version -from mriqc import __version__, __copyright__ +from mriqc import __copyright__, __version__ # Disable etelemetry during doc builds -os.environ["NIPYPE_NO_ET"] = "1" +os.environ['NIPYPE_NO_ET'] = '1' # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -29,51 +30,51 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.doctest", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.mathjax", - "sphinx.ext.ifconfig", - "sphinx.ext.viewcode", - "nipype.sphinxext.plot_workflow", - "sphinxarg.ext", # argparse extension + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.mathjax', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'nipype.sphinxext.plot_workflow', + 'sphinxarg.ext', # argparse extension # 'sphinx.ext.autosectionlabel', ] # Mock modules in autodoc: autodoc_mock_imports = [ - "dipy", - "matplotlib", - "nilearn", - "numpy", - "pandas", - "scipy", - "seaborn", - "sklearn", - "statsmodels", - "xgboost", + 'dipy', + 'matplotlib', + 'nilearn', + 'numpy', + 'pandas', + 'scipy', + 'seaborn', + 'sklearn', + 'statsmodels', + 'xgboost', ] -suppress_warnings = ["image.nonlocal_uri"] +suppress_warnings = ['image.nonlocal_uri'] # Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] +templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] -source_suffix = ".rst" +source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = "index" +master_doc = 'index' # General information about the project. -project = "mriqc" -author = "The NiPreps Developers" +project = 'mriqc' +author = 'The NiPreps Developers' copyright = __copyright__ # The version info for the project you're documenting, acts as replacement for @@ -90,7 +91,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = "en" +language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -118,7 +119,7 @@ # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" +pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] @@ -134,7 +135,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "sphinx_rtd_theme" +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -163,7 +164,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied @@ -226,7 +227,7 @@ # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = "mriqcdoc" +htmlhelp_basename = 'mriqcdoc' # -- Options for LaTeX output --------------------------------------------- @@ -247,10 +248,10 @@ latex_documents = [ ( master_doc, - "mriqc.tex", - "mriqc Documentation", + 'mriqc.tex', + 'mriqc Documentation', author, - "manual", + 'manual', ), ] @@ -279,7 +280,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
-man_pages = [(master_doc, "mriqc", "mriqc Documentation", [author], 1)]
+man_pages = [(master_doc, 'mriqc', 'mriqc Documentation', [author], 1)]
 
 # If true, show URL addresses after external links.
 # man_show_urls = False
 
@@ -293,12 +294,12 @@
 texinfo_documents = [
     (
         master_doc,
-        "mriqc",
-        "mriqc Documentation",
+        'mriqc',
+        'mriqc Documentation',
         author,
-        "mriqc",
-        "One line description of project.",
-        "Miscellaneous",
+        'mriqc',
+        'One line description of project.',
+        'Miscellaneous',
     ),
 ]
 
@@ -317,12 +318,12 @@
 
 # Example configuration for intersphinx: refer to the Python standard library.
 intersphinx_mapping = {
-    "bids": ("https://bids-standard.github.io/pybids/", None),
-    "matplotlib": ("https://matplotlib.org/", None),
-    "nibabel": ("https://nipy.org/nibabel/", None),
-    "nipype": ("https://nipype.readthedocs.io/en/latest/", None),
-    "numpy": ("https://numpy.org/doc/stable/", None),
-    "pandas": ("https://pandas.pydata.org/pandas-docs/dev/", None),
-    "python": ("https://docs.python.org/3/", None),
-    "scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
+    'bids': ('https://bids-standard.github.io/pybids/', None),
+    'matplotlib': ('https://matplotlib.org/', None),
+    'nibabel': ('https://nipy.org/nibabel/', None),
+    'nipype': ('https://nipype.readthedocs.io/en/latest/', None),
+    'numpy': ('https://numpy.org/doc/stable/', None),
+    'pandas': ('https://pandas.pydata.org/pandas-docs/dev/', None),
+    'python': ('https://docs.python.org/3/', None),
+    'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
 }
diff --git a/mriqc/bin/abide2bids.py b/mriqc/bin/abide2bids.py
index 9a9d68b1d..7dba458e7 100644
--- a/mriqc/bin/abide2bids.py
+++ b/mriqc/bin/abide2bids.py
@@ -21,6 +21,8 @@
 # https://www.nipreps.org/community/licensing/
 #
 """ABIDE2BIDS download tool."""
+from __future__ import annotations
+
 import errno
 import json
 import os
@@ -30,13 +32,15 @@
 import tempfile
 from argparse import ArgumentParser, RawTextHelpFormatter
 from multiprocessing import Pool
-from typing import Tuple
-from xml.etree import ElementTree as et
 
 import numpy as np
+from defusedxml import ElementTree as et
 
 from mriqc.bin import messages
+
+_curl_cmd = shutil.which('curl')
+_unzip_cmd = shutil.which('unzip')
+
 
 def main():
     """Entry point."""
@@ -99,7 +103,7 @@ def main():
     )
 
 
-def fetch(args: Tuple[str, str, str, str]) -> Tuple[str, str]:
+def fetch(args: tuple[str, str, str, str]) -> tuple[str, str]:
     """
     Downloads a subject and formats it into BIDS.
@@ -128,9 +132,9 @@ def fetch(args: Tuple[str, str, str, str]) -> Tuple[str, str]: pkg_id = [u[9:] for u in url.split('/') if u.startswith('NITRC_IR_')][0] sub_file = op.join(tmpdir, '%s.zip' % pkg_id) - cmd = ['curl', '-s', '-u', f'{user}:{password}', '-o', sub_file, url] + cmd = [_curl_cmd, '-s', '-u', f'{user}:{password}', '-o', sub_file, url] sp.check_call(cmd) - sp.check_call(['unzip', '-qq', '-d', tmpdir, '-u', sub_file]) + sp.check_call([_unzip_cmd, '-qq', '-d', tmpdir, '-u', sub_file]) abide_root = op.join(tmpdir, 'ABIDE') files = [] diff --git a/mriqc/bin/fs2gif.py b/mriqc/bin/fs2gif.py index 0ffdd37bf..def8f1829 100644 --- a/mriqc/bin/fs2gif.py +++ b/mriqc/bin/fs2gif.py @@ -28,7 +28,7 @@ import subprocess as sp from argparse import ArgumentParser, RawTextHelpFormatter from errno import EEXIST -from shutil import rmtree +from shutil import rmtree, which from tempfile import mkdtemp import nibabel as nb @@ -131,7 +131,7 @@ def main(): tclfp.write(' RedrawScreen\n') tclfp.write( f' SaveTIFF [format "{tmp_sub}/{subid}-' - + '%03d.tif" $i]\n' + '%03d.tif" $i]\n' ) tclfp.write(' incr i\n') tclfp.write('}\n') @@ -155,7 +155,7 @@ def main(): print('Stacking coronal slices') sp.call( [ - 'convert', + which('convert'), '-delay', '10', '-loop', @@ -227,7 +227,7 @@ def main(): print('Stacking coronal slices') sp.call( [ - 'convert', + which('convert'), '-delay', '10', '-loop', @@ -238,7 +238,7 @@ def main(): ) sp.call( [ - 'convert', + which('convert'), '-delay', '10', '-loop', diff --git a/mriqc/bin/mriqcwebapi_test.py b/mriqc/bin/mriqcwebapi_test.py index 2bc334af1..27415ed3d 100644 --- a/mriqc/bin/mriqcwebapi_test.py +++ b/mriqc/bin/mriqcwebapi_test.py @@ -65,11 +65,12 @@ def main(): opts = get_parser().parse_args() get_log_message = messages.WEBAPI_GET.format(address=opts.webapi_url) MRIQC_LOG.info(get_log_message) - response = get(opts.webapi_url).json() + response = get(opts.webapi_url, timeout=5).json() n_records = response['_meta']['total'] response_log_message = messages.WEBAPI_REPORT.format(n_records=n_records) MRIQC_LOG.info(response_log_message) - assert opts.expected == n_records + if opts.expected != n_records: + raise AssertionError if __name__ == '__main__': diff --git a/mriqc/bin/subject_wrangler.py b/mriqc/bin/subject_wrangler.py index 2634862a0..a82381ef6 100644 --- a/mriqc/bin/subject_wrangler.py +++ b/mriqc/bin/subject_wrangler.py @@ -120,7 +120,7 @@ def main(): for i, subj in enumerate(subject_list): subject_list[i] = subj[4:] if subj.startswith('sub-') else subj - subject_list = sorted(list(set(subject_list))) + subject_list = sorted(set(subject_list)) if list(set(subject_list) - set(all_subjects)): non_exist = list(set(subject_list) - set(all_subjects)) diff --git a/mriqc/cli/parser.py b/mriqc/cli/parser.py index 1fd152437..a69dc7419 100644 --- a/mriqc/cli/parser.py +++ b/mriqc/cli/parser.py @@ -42,10 +42,10 @@ def _parse_participant_labels(value): ['s060'] """ - return sorted(set( + return sorted({ re.sub(r'^sub-', '', item.strip()) for item in re.split(r'\s+', f'{value}'.strip()) - )) + }) def _build_parser(): @@ -62,7 +62,10 @@ def _build_parser(): class DeprecateAction(Action): def __call__(self, parser, namespace, values, option_string=None): - warnings.warn(f'Argument {option_string} is deprecated and is *ignored*.') + warnings.warn( + f'Argument {option_string} is deprecated and is *ignored*.', + stacklevel=2, + ) delattr(namespace, self.dest) class ParticipantLabelAction(Action): @@ -489,7 +492,7 @@ def parse_args(args=None, namespace=None): # Load base 
plugin_settings from file if --use-plugin if opts.use_plugin is not None: - from yaml import load as loadyml + from yaml import safe_load as loadyml with open(opts.use_plugin) as f: plugin_settings = loadyml(f) @@ -590,9 +593,9 @@ def parse_args(args=None, namespace=None): ) # Check no DWI or others are sneaked into MRIQC - unknown_mods = set(config.workflow.inputs.keys()) - set( + unknown_mods = set(config.workflow.inputs.keys()) - { suffix.lower() for suffix in config.SUPPORTED_SUFFIXES - ) + } if unknown_mods: parser.error( 'MRIQC is unable to process the following modalities: ' diff --git a/mriqc/cli/run.py b/mriqc/cli/run.py index af359826d..71ca9b7d5 100644 --- a/mriqc/cli/run.py +++ b/mriqc/cli/run.py @@ -29,7 +29,7 @@ def main(): import gc import os import sys - from tempfile import mktemp + from tempfile import mkstemp from mriqc import config, messages from mriqc.cli.parser import parse_args @@ -50,9 +50,9 @@ def main(): # straightforward way to communicate with the child process is via the filesystem. # The config file name needs to be unique, otherwise multiple mriqc instances # will create write conflicts. - config_file = mktemp( + config_file = mkstemp( dir=config.execution.work_dir, prefix='.mriqc.', suffix='.toml' - ) + )[1] config.to_filename(config_file) config.file_path = config_file exitcode = 0 @@ -83,9 +83,9 @@ def main(): _resmon = ResourceRecorder( pid=os.getpid(), - log_file=mktemp( + log_file=mkstemp( dir=config.execution.work_dir, prefix='.resources.', suffix='.tsv' - ), + )[1], ) _resmon.start() diff --git a/mriqc/cli/version.py b/mriqc/cli/version.py index 9b202aa04..3c1e98f0b 100644 --- a/mriqc/cli/version.py +++ b/mriqc/cli/version.py @@ -21,7 +21,8 @@ # https://www.nipreps.org/community/licensing/ # """Version CLI helpers.""" -from datetime import datetime +from contextlib import suppress +from datetime import UTC, datetime from pathlib import Path import requests @@ -46,25 +47,24 @@ def check_latest(): cachefile = None if cachefile and cachefile.exists(): - try: + with suppress(Exception): latest, date = cachefile.read_text().split('|') - except Exception: - pass - else: + + if latest and date: try: latest = Version(latest) - date = datetime.strptime(date, DATE_FMT) + date = datetime.strptime(date, DATE_FMT).astimezone(UTC) except (InvalidVersion, ValueError): latest = None else: - if abs((datetime.now() - date).days) > RELEASE_EXPIRY_DAYS: + if abs((datetime.now(tz=UTC) - date).days) > RELEASE_EXPIRY_DAYS: outdated = True if latest is None or outdated is True: - try: + response = None + + with suppress(Exception): response = requests.get(url='https://pypi.org/pypi/mriqc/json', timeout=1.0) - except Exception: - response = None if response and response.status_code == 200: versions = [Version(rel) for rel in response.json()['releases'].keys()] @@ -75,12 +75,10 @@ def check_latest(): latest = None if cachefile is not None and latest is not None: - try: + with suppress(Exception): cachefile.write_text( - '|'.join(('%s' % latest, datetime.now().strftime(DATE_FMT))) + '|'.join(('%s' % latest, datetime.now(tz=UTC).strftime(DATE_FMT))) ) - except Exception: - pass return latest @@ -88,15 +86,14 @@ def check_latest(): def is_flagged(): """Check whether current version is flagged.""" # https://raw.githubusercontent.com/nipreps/mriqc/master/.versions.json - flagged = tuple() - try: + flagged = () + response = None + with suppress(Exception): response = requests.get( url="""\ https://raw.githubusercontent.com/nipreps/mriqc/master/.versions.json""", timeout=1.0, ) - 
except Exception: - response = None if response and response.status_code == 200: flagged = response.json().get('flagged', {}) or {} diff --git a/mriqc/config.py b/mriqc/config.py index 426934df0..c3373c6c3 100644 --- a/mriqc/config.py +++ b/mriqc/config.py @@ -89,6 +89,7 @@ """ import os import sys +from contextlib import suppress from pathlib import Path from time import strftime from uuid import uuid4 @@ -158,16 +159,15 @@ ) ) -try: +_free_mem_at_start = None +with suppress(Exception): from psutil import virtual_memory _free_mem_at_start = round(virtual_memory().free / 1024**3, 1) -except Exception: - _free_mem_at_start = None _oc_limit = 'n/a' _oc_policy = 'n/a' -try: +with suppress(Exception): # Memory policy may have a large effect on types of errors experienced _proc_oc_path = Path('/proc/sys/vm/overcommit_memory') if _proc_oc_path.exists(): @@ -185,22 +185,27 @@ _oc_limit = '{}%'.format( Path('/proc/sys/vm/overcommit_ratio').read_text().strip() ) -except Exception: - pass _memory_gb = None -try: - if 'linux' in sys.platform: + +if 'linux' in sys.platform: + with suppress(Exception): with open('/proc/meminfo') as f_in: _meminfo_lines = f_in.readlines() _mem_total_line = [line for line in _meminfo_lines if 'MemTotal' in line][0] _mem_total = float(_mem_total_line.split()[1]) _memory_gb = _mem_total / (1024.0**2) - elif 'darwin' in sys.platform: - _mem_str = os.popen('sysctl hw.memsize').read().strip().split(' ')[-1] - _memory_gb = float(_mem_str) / (1024.0**3) -except Exception: - pass +elif 'darwin' in sys.platform: + from shutil import which + from subprocess import check_output + + if (_cmd := which('sysctl')): + with suppress(Exception): + _mem_str = check_output( + [_cmd, 'hw.memsize'] + ).decode().strip().split(' ')[-1] + _memory_gb = float(_mem_str) / (1024.0**3) + file_path: Path = None """ @@ -211,7 +216,7 @@ class _Config: """An abstract class forbidding instantiation.""" - _paths = tuple() + _paths = () def __init__(self): """Avert instantiation.""" diff --git a/mriqc/data/config.py b/mriqc/data/config.py index 77273c0b7..38a7fb673 100644 --- a/mriqc/data/config.py +++ b/mriqc/data/config.py @@ -42,6 +42,7 @@ def __init__(self): loader=jinja2.FileSystemLoader(searchpath='/'), trim_blocks=True, lstrip_blocks=True, + autoescape=True, ) def compile(self, configs): diff --git a/mriqc/engine/plugin.py b/mriqc/engine/plugin.py index 960a4da14..0d18236ff 100644 --- a/mriqc/engine/plugin.py +++ b/mriqc/engine/plugin.py @@ -53,7 +53,7 @@ def run_node(node, updatehash, taskid): """ # Init variables - result = dict(result=None, traceback=None, taskid=taskid) + result = {'result': None, 'traceback': None, 'taskid': taskid} # Try and execute the node via node.run() try: @@ -174,7 +174,7 @@ def run(self, graph, config, updatehash=False): taskid, jobid = self.pending_tasks.pop() try: result = self._get_result(taskid) - except Exception as exc: + except Exception as exc: # noqa: BLE001 notrun.append(self._clean_queue(jobid, graph)) errors.append(exc) else: @@ -187,7 +187,9 @@ def run(self, graph, config, updatehash=False): self._remove_node_dirs() self._clear_task(taskid) else: - assert self.proc_done[jobid] and self.proc_pending[jobid] + if not (self.proc_done[jobid] and self.proc_pending[jobid]): + raise RuntimeError( + f'Plugin error while appending task <{taskid}> with ID {jobid}.') toappend.insert(0, (taskid, jobid)) if toappend: @@ -357,12 +359,16 @@ def _remove_node_deps(self, jobid, crashfile, graph): dfs_preorder = nx.dfs_preorder except AttributeError: dfs_preorder = 
nx.dfs_preorder_nodes - subnodes = [s for s in dfs_preorder(graph, self.procs[jobid])] + subnodes = list(dfs_preorder(graph, self.procs[jobid])) for node in subnodes: idx = self.procs.index(node) self.proc_done[idx] = True self.proc_pending[idx] = False - return dict(node=self.procs[jobid], dependents=subnodes, crashfile=crashfile) + return { + 'node': self.procs[jobid], + 'dependents': subnodes, + 'crashfile': crashfile, + } def _remove_node_dirs(self): """Remove directories whose outputs have already been used up.""" diff --git a/mriqc/instrumentation/resources.py b/mriqc/instrumentation/resources.py index 9e4bb3ca5..82b86ff61 100644 --- a/mriqc/instrumentation/resources.py +++ b/mriqc/instrumentation/resources.py @@ -23,7 +23,7 @@ """Instrumentation to profile resource utilization.""" import signal from contextlib import suppress -from datetime import datetime +from datetime import UTC, datetime from multiprocessing import Event, Process from pathlib import Path from time import sleep, time_ns @@ -74,7 +74,7 @@ def sample( pid=None, recursive=True, attrs=SAMPLE_ATTRS, - exclude=tuple(), + exclude=(), ): """ Probe process tree and snapshot current resource utilization. @@ -127,7 +127,7 @@ def parse_sample(datapoint, timestamp=None, attrs=SAMPLE_ATTRS): def sample2file( - pid=None, recursive=True, timestamp=None, fd=None, flush=True, exclude=tuple() + pid=None, recursive=True, timestamp=None, fd=None, flush=True, exclude=() ): if fd is None: return @@ -159,7 +159,7 @@ def __init__( Path(log_file if log_file is not None else f'.prof-{pid}.tsv').absolute() ) """An open file descriptor where results are dumped.""" - self._exclude = exclude_probe or tuple() + self._exclude = exclude_probe or () """A list/tuple containing PIDs that should not be monitored.""" self._freq_ns = int(max(frequency, 0.02) * 1e9) """Sampling frequency (stored in ns).""" @@ -179,7 +179,7 @@ def run(self, *args, **kwargs): # Write headers (comment trace + header row) _header = [ f"# MRIQC Resource recorder started tracking PID {self._pid} " - f"{datetime.now().strftime('(%Y/%m/%d; %H:%M:%S)')}", + f"{datetime.now(tz=UTC).strftime('(%Y/%m/%d; %H:%M:%S)')}", '\t'.join(('timestamp', *SAMPLE_ATTRS)).replace( 'memory_info', 'mem_rss_mb\tmem_vsm_mb' ), @@ -202,7 +202,7 @@ def run(self, *args, **kwargs): except psutil.NoSuchProcess: print( f"# MRIQC Resource recorder killed " - f"{datetime.now().strftime('(%Y/%m/%d; %H:%M:%S)')}", + f"{datetime.now(tz=UTC).strftime('(%Y/%m/%d; %H:%M:%S)')}", file=_logfile, ) _logfile.flush() @@ -220,5 +220,5 @@ def stop(self, *args): with Path(self._logfile).open('a') as f: f.write( f"# MRIQC Resource recorder finished " - f"{datetime.now().strftime('(%Y/%m/%d; %H:%M:%S)')}", + f"{datetime.now(tz=UTC).strftime('(%Y/%m/%d; %H:%M:%S)')}", ) diff --git a/mriqc/instrumentation/viz.py b/mriqc/instrumentation/viz.py index f0e1975b7..fa1beedfd 100644 --- a/mriqc/instrumentation/viz.py +++ b/mriqc/instrumentation/viz.py @@ -28,7 +28,7 @@ _TIME_LABEL = 'runtime' -def plot(filename, param='mem_vsm_mb', mask_processes=tuple(), out_file=None): +def plot(filename, param='mem_vsm_mb', mask_processes=(), out_file=None): """Plot a recording file.""" data = pd.read_csv(filename, sep=r'\s+', comment='#') diff --git a/mriqc/interfaces/anatomical.py b/mriqc/interfaces/anatomical.py index ba4918aee..7299f458a 100644 --- a/mriqc/interfaces/anatomical.py +++ b/mriqc/interfaces/anatomical.py @@ -456,7 +456,7 @@ def _run_interface(self, runtime): label_im, nb_labels = nd.label(mask) if nb_labels > 2: sizes = 
nd.sum(mask, label_im, list(range(nb_labels + 1))) - ordered = list(reversed(sorted(zip(sizes, list(range(nb_labels + 1)))))) + ordered = sorted(zip(sizes, list(range(nb_labels + 1))), reverse=True) for _, label in ordered[2:]: mask[label_im == label] = 0 diff --git a/mriqc/interfaces/common/__init__.py b/mriqc/interfaces/common/__init__.py index 8e622064a..9ac1b1bb2 100644 --- a/mriqc/interfaces/common/__init__.py +++ b/mriqc/interfaces/common/__init__.py @@ -20,13 +20,10 @@ # # https://www.nipreps.org/community/licensing/ # -from mriqc.interfaces.common.conform_image import ( - ConformImage, - ConformImageInputSpec, - ConformImageOutputSpec, -) -from mriqc.interfaces.common.ensure_size import ( - EnsureSize, - EnsureSizeInputSpec, - EnsureSizeOutputSpec, +from mriqc.interfaces.common.conform_image import ConformImage +from mriqc.interfaces.common.ensure_size import EnsureSize + +__all__ = ( + 'ConformImage', + 'EnsureSize', ) diff --git a/mriqc/interfaces/common/conform_image.py b/mriqc/interfaces/common/conform_image.py index e6def65a2..18f660323 100644 --- a/mriqc/interfaces/common/conform_image.py +++ b/mriqc/interfaces/common/conform_image.py @@ -36,7 +36,6 @@ ) from mriqc import config, messages -from mriqc.interfaces import data_types #: Output file name format. OUT_FILE = '{prefix}_conformed{ext}' @@ -74,22 +73,60 @@ class ConformImageOutputSpec(TraitedSpec): class ConformImage(SimpleInterface): - f""" + """ Conforms an input image. List of nifti datatypes: .. note: Original Analyze 7.5 types - {data_types.ANALYZE_75} + DT_NONE 0 + DT_UNKNOWN 0 / what it says, dude / + DT_BINARY 1 / binary (1 bit/voxel) / + DT_UNSIGNED_CHAR 2 / unsigned char (8 bits/voxel) / + DT_SIGNED_SHORT 4 / signed short (16 bits/voxel) / + DT_SIGNED_INT 8 / signed int (32 bits/voxel) / + DT_FLOAT 16 / float (32 bits/voxel) / + DT_COMPLEX 32 / complex (64 bits/voxel) / + DT_DOUBLE 64 / double (64 bits/voxel) / + DT_RGB 128 / RGB triple (24 bits/voxel) / + DT_ALL 255 / not very useful (?) / .. note: Added names for the same data types - {data_types.ADDED} - - .. note: New codes for NIFTI - - {data_types.NEW_CODES} + DT_UINT8 2 + DT_INT16 4 + DT_INT32 8 + DT_FLOAT32 16 + DT_COMPLEX64 32 + DT_FLOAT64 64 + DT_RGB24 128 + + .. note: New codes for NIfTI + + DT_INT8 256 / signed char (8 bits) / + DT_UINT16 512 / unsigned short (16 bits) / + DT_UINT32 768 / unsigned int (32 bits) / + DT_INT64 1024 / long long (64 bits) / + DT_UINT64 1280 / unsigned long long (64 bits) / + DT_FLOAT128 1536 / long double (128 bits) / + DT_COMPLEX128 1792 / double pair (128 bits) / + DT_COMPLEX256 2048 / long double pair (256 bits) / + NIFTI_TYPE_UINT8 2 /! unsigned char. / + NIFTI_TYPE_INT16 4 /! signed short. / + NIFTI_TYPE_INT32 8 /! signed int. / + NIFTI_TYPE_FLOAT32 16 /! 32 bit float. / + NIFTI_TYPE_COMPLEX64 32 /! 64 bit complex = 2 32 bit floats. / + NIFTI_TYPE_FLOAT64 64 /! 64 bit float = double. / + NIFTI_TYPE_RGB24 128 /! 3 8 bit bytes. / + NIFTI_TYPE_INT8 256 /! signed char. / + NIFTI_TYPE_UINT16 512 /! unsigned short. / + NIFTI_TYPE_UINT32 768 /! unsigned int. / + NIFTI_TYPE_INT64 1024 /! signed long long. / + NIFTI_TYPE_UINT64 1280 /! unsigned long long. / + NIFTI_TYPE_FLOAT128 1536 /! 128 bit float = long double. / + NIFTI_TYPE_COMPLEX128 1792 /! 128 bit complex = 2 64 bit floats. / + NIFTI_TYPE_COMPLEX256 2048 /! 
256 bit complex = 2 128 bit floats / """ diff --git a/mriqc/interfaces/data_types.py b/mriqc/interfaces/data_types.py deleted file mode 100644 index 1ddb9ad67..000000000 --- a/mriqc/interfaces/data_types.py +++ /dev/null @@ -1,69 +0,0 @@ -# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set ft=python sts=4 ts=4 sw=4 et: -# -# Copyright 2021 The NiPreps Developers -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# We support and encourage derived works from this project, please read -# about our expectations at -# -# https://www.nipreps.org/community/licensing/ -# -ANALYZE_75 = """ - DT_NONE 0 - DT_UNKNOWN 0 / what it says, dude / - DT_BINARY 1 / binary (1 bit/voxel) / - DT_UNSIGNED_CHAR 2 / unsigned char (8 bits/voxel) / - DT_SIGNED_SHORT 4 / signed short (16 bits/voxel) / - DT_SIGNED_INT 8 / signed int (32 bits/voxel) / - DT_FLOAT 16 / float (32 bits/voxel) / - DT_COMPLEX 32 / complex (64 bits/voxel) / - DT_DOUBLE 64 / double (64 bits/voxel) / - DT_RGB 128 / RGB triple (24 bits/voxel) / - DT_ALL 255 / not very useful (?) / -""" -ADDED = """ - DT_UINT8 2 - DT_INT16 4 - DT_INT32 8 - DT_FLOAT32 16 - DT_COMPLEX64 32 - DT_FLOAT64 64 - DT_RGB24 128 -""" -NEW_CODES = """ - DT_INT8 256 / signed char (8 bits) / - DT_UINT16 512 / unsigned short (16 bits) / - DT_UINT32 768 / unsigned int (32 bits) / - DT_INT64 1024 / long long (64 bits) / - DT_UINT64 1280 / unsigned long long (64 bits) / - DT_FLOAT128 1536 / long double (128 bits) / - DT_COMPLEX128 1792 / double pair (128 bits) / - DT_COMPLEX256 2048 / long double pair (256 bits) / - NIFTI_TYPE_UINT8 2 /! unsigned char. / - NIFTI_TYPE_INT16 4 /! signed short. / - NIFTI_TYPE_INT32 8 /! signed int. / - NIFTI_TYPE_FLOAT32 16 /! 32 bit float. / - NIFTI_TYPE_COMPLEX64 32 /! 64 bit complex = 2 32 bit floats. / - NIFTI_TYPE_FLOAT64 64 /! 64 bit float = double. / - NIFTI_TYPE_RGB24 128 /! 3 8 bit bytes. / - NIFTI_TYPE_INT8 256 /! signed char. / - NIFTI_TYPE_UINT16 512 /! unsigned short. / - NIFTI_TYPE_UINT32 768 /! unsigned int. / - NIFTI_TYPE_INT64 1024 /! signed long long. / - NIFTI_TYPE_UINT64 1280 /! unsigned long long. / - NIFTI_TYPE_FLOAT128 1536 /! 128 bit float = long double. / - NIFTI_TYPE_COMPLEX128 1792 /! 128 bit complex = 2 64 bit floats. / - NIFTI_TYPE_COMPLEX256 2048 /! 
256 bit complex = 2 128 bit floats / -""" diff --git a/mriqc/interfaces/datalad.py b/mriqc/interfaces/datalad.py index 5b21b4cbe..6f54ec959 100644 --- a/mriqc/interfaces/datalad.py +++ b/mriqc/interfaces/datalad.py @@ -91,7 +91,7 @@ def get(*args, **kwargs): """Mock datalad get.""" dataset_path = Path(dataset_path) - for field, value in inputs.items(): + for value in inputs.values(): if not isdefined(value): continue @@ -110,7 +110,7 @@ def get(*args, **kwargs): _pth, dataset=dataset_path ) - except Exception as exc: + except Exception as exc: # noqa: BLE001 config.loggers.interface.warning(f'datalad get on {_pth} failed.') if ( config.environment.exec_env == 'docker' diff --git a/mriqc/interfaces/webapi.py b/mriqc/interfaces/webapi.py index 3960e9c97..86c1c7b56 100644 --- a/mriqc/interfaces/webapi.py +++ b/mriqc/interfaces/webapi.py @@ -246,7 +246,10 @@ def upload_qc_metrics( try: # if the modality is bold, call "bold" endpoint response = requests.post( - f'{endpoint}/{modality}', headers=headers, data=dumps(data) + f'{endpoint}/{modality}', + headers=headers, + data=dumps(data), + timeout=15, ) except requests.ConnectionError as err: errmsg = ( diff --git a/mriqc/qc/tests/test_anatomical.py b/mriqc/qc/tests/test_anatomical.py index 31eb01e1a..dcad3fa4f 100644 --- a/mriqc/qc/tests/test_anatomical.py +++ b/mriqc/qc/tests/test_anatomical.py @@ -110,4 +110,5 @@ def test_qi2(gtruth, sigma): data, _, bgdata = gtruth.get_data(sigma, rice) value, _ = art_qi2(data, bgdata, save_plot=False) rmtree(tmpdir) - assert value > 0.0 and value < 0.04 + assert value > 0.0 + assert value < 0.04 diff --git a/mriqc/reports/group.py b/mriqc/reports/group.py index e03ad6c5f..9ed8f8ce3 100644 --- a/mriqc/reports/group.py +++ b/mriqc/reports/group.py @@ -30,8 +30,8 @@ def gen_html(csv_file, mod, csv_failed=None, out_file=None): - import datetime import os.path as op + from datetime import UTC, datetime from niworkflows.data import Loader @@ -264,7 +264,7 @@ def gen_html(csv_file, mod, csv_failed=None, out_file=None): tpl.generate_conf( { 'modality': mod, - 'timestamp': datetime.datetime.now().strftime('%Y-%m-%d, %H:%M'), + 'timestamp': datetime.now(tz=UTC).strftime('%Y-%m-%d, %H:%M'), 'version': ver, 'csv_groups': csv_groups, 'failed': failed, diff --git a/mriqc/synthstrip/model.py b/mriqc/synthstrip/model.py index e78fc0f4b..ddedaec2e 100644 --- a/mriqc/synthstrip/model.py +++ b/mriqc/synthstrip/model.py @@ -128,7 +128,7 @@ def __init__( # now we take care of any remaining convolutions self.remaining = nn.ModuleList() - for num, nf in enumerate(final_convs): + for _num, nf in enumerate(final_convs): self.remaining.append(ConvBlock(ndims, prev_nf, nf)) prev_nf = nf diff --git a/mriqc/tests/test_config.py b/mriqc/tests/test_config.py index 689c77a56..048df372b 100644 --- a/mriqc/tests/test_config.py +++ b/mriqc/tests/test_config.py @@ -56,11 +56,11 @@ def _expand_bids(tmp_path, testdata_path, testcase): @pytest.mark.parametrize( 'testcase', - ( + [ 'gh921-dmd-20220428-0', 'gh921-dmd-20230319-0', 'gh1086-ds004134', - ), + ], ) def test_bids_indexing_manifest(tmp_path, testdata_path, testcase): """Check ``BIDSLayout`` is indexing what it should.""" diff --git a/mriqc/tests/test_reports.py b/mriqc/tests/test_reports.py index 2b104d866..650089d24 100644 --- a/mriqc/tests/test_reports.py +++ b/mriqc/tests/test_reports.py @@ -29,7 +29,7 @@ @pytest.mark.parametrize( - 'dataset,subject', [ + ('dataset', 'subject'), [ ('ds002785', '0017'), ('ds002785', '0042'), ] diff --git a/mriqc/utils/debug.py b/mriqc/utils/debug.py 
index befda6b88..43a46ada9 100644
--- a/mriqc/utils/debug.py
+++ b/mriqc/utils/debug.py
@@ -57,13 +57,13 @@ def setup_exceptionhook(ipython=False):
     pdb.post_mortem; if not interactive, then invokes default handler.
     """
 
-    def _pdb_excepthook(type, value, tb):
+    def _pdb_excepthook(exc_type, value, tb):
         import traceback
 
-        traceback.print_exception(type, value, tb)
+        traceback.print_exception(exc_type, value, tb)
         print()
         if is_interactive():
-            import pdb
+            import pdb  # noqa: T100
 
             pdb.post_mortem(tb)
 
diff --git a/mriqc/workflows/anatomical/base.py b/mriqc/workflows/anatomical/base.py
index 48770b917..0fb6ec579 100644
--- a/mriqc/workflows/anatomical/base.py
+++ b/mriqc/workflows/anatomical/base.py
@@ -804,7 +804,7 @@ def gradient_threshold(in_file, brainmask, thresh=15.0, out_file=None, aniso=Fal
     artmsk = np.zeros_like(mask)
     if nb_labels > 2:
         sizes = sim.sum(mask, label_im, list(range(nb_labels + 1)))
-        ordered = list(reversed(sorted(zip(sizes, list(range(nb_labels + 1))))))
+        ordered = sorted(zip(sizes, list(range(nb_labels + 1))), reverse=True)
         for _, label in ordered[2:]:
             mask[label_im == label] = 0
             artmsk[label_im == label] = 1
@@ -817,15 +817,17 @@ def gradient_threshold(in_file, brainmask, thresh=15.0, out_file=None, aniso=Fal
 
 
 def _get_imgtype(in_file):
-    from pathlib import Path
+    from mriqc.workflows.anatomical.base import _get_mod
 
-    return int(Path(in_file).name.rstrip('.gz').rstrip('.nii').split('_')[-1][1])
+    return int(_get_mod(in_file)[1])
 
 
 def _get_mod(in_file):
     from pathlib import Path
 
-    return Path(in_file).name.rstrip('.gz').rstrip('.nii').split('_')[-1]
+    in_file = Path(in_file)
+    extension = ''.join(in_file.suffixes)
+    return in_file.name.replace(extension, '').split('_')[-1]
 
 
 def _pop(inlist):
diff --git a/mriqc/workflows/diffusion/base.py b/mriqc/workflows/diffusion/base.py
index be8ed8914..1f24c299c 100644
--- a/mriqc/workflows/diffusion/base.py
+++ b/mriqc/workflows/diffusion/base.py
@@ -637,9 +637,9 @@ def _tolist(value):
 
 
 def _get_bvals(bmatrix):
-    import numpy
+    import numpy as np
 
-    return numpy.squeeze(bmatrix[:, -1]).tolist()
+    return np.squeeze(bmatrix[:, -1]).tolist()
 
 
 def _first(inlist):
diff --git a/pyproject.toml b/pyproject.toml
index 22071fc9b..c88c75783 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -256,9 +256,12 @@ inline-quotes = "single"
 
 [tool.ruff.lint.extend-per-file-ignores]
 "*/test_*.py" = ["S101"]
-"fmriprep/utils/debug.py" = ["A002", "T100"]
-"docs/conf.py" = ["A001"]
-"docs/sphinxext/github_link.py" = ["BLE001"]
+"docs/source/conf.py" = ["A001"]
+"mriqc/bin/nib_hash.py" = ["S324"]
+"mriqc/config.py" = ["S105"]
+"mriqc/conftest.py" = ["PT004"]
+"mriqc/engine/plugin.py" = ["BLE001"]
+"mriqc/utils/debug.py" = ["A002", "T100"]
 
 [tool.ruff.format]
 quote-style = "single"