From 4941bf09fef4ac49a061abca9c247cd18c3aa51b Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos
<3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 22 Nov 2023 00:00:49 +0100
Subject: [PATCH] STY: Use f-strings where possible
As suggested by `pyupgrade --py38-plus`:
https://github.com/asottile/pyupgrade#f-strings
---
niworkflows/interfaces/bids.py | 6 ++----
niworkflows/interfaces/cifti.py | 2 +-
niworkflows/interfaces/confounds.py | 6 +++---
niworkflows/interfaces/fixes.py | 6 +++---
niworkflows/interfaces/freesurfer.py | 4 ++--
niworkflows/interfaces/header.py | 2 +-
niworkflows/interfaces/nibabel.py | 2 +-
niworkflows/interfaces/tests/test_bids.py | 2 +-
niworkflows/interfaces/utility.py | 6 +++---
niworkflows/reports/core.py | 4 ++--
niworkflows/utils/bids.py | 2 +-
niworkflows/utils/misc.py | 17 +++++++----------
niworkflows/utils/spaces.py | 3 ++-
niworkflows/utils/tests/test_utils.py | 4 ++--
niworkflows/viz/plots.py | 10 +++++-----
niworkflows/viz/utils.py | 6 +++---
16 files changed, 39 insertions(+), 43 deletions(-)
diff --git a/niworkflows/interfaces/bids.py b/niworkflows/interfaces/bids.py
index 21a4e43f4a0..2c139c6796f 100644
--- a/niworkflows/interfaces/bids.py
+++ b/niworkflows/interfaces/bids.py
@@ -269,14 +269,12 @@ def _run_interface(self, runtime):
if self._require_t1w and not bids_dict['t1w']:
raise FileNotFoundError(
- "No T1w images found for subject sub-{}".format(self.inputs.subject_id)
+ f"No T1w images found for subject sub-{self.inputs.subject_id}"
)
if self._require_funcs and not bids_dict["bold"]:
raise FileNotFoundError(
- "No functional images found for subject sub-{}".format(
- self.inputs.subject_id
- )
+ f"No functional images found for subject sub-{self.inputs.subject_id}"
)
for imtype in ["bold", "t2w", "flair", "fmap", "sbref", "roi", "pet", "asl"]:
diff --git a/niworkflows/interfaces/cifti.py b/niworkflows/interfaces/cifti.py
index f7928029ccf..34b8e1032cf 100644
--- a/niworkflows/interfaces/cifti.py
+++ b/niworkflows/interfaces/cifti.py
@@ -400,6 +400,6 @@ def _create_cifti_image(
img.set_data_dtype(bold_img.get_data_dtype())
img.nifti_header.set_intent("NIFTI_INTENT_CONNECTIVITY_DENSE_SERIES")
- out_file = "{}.dtseries.nii".format(split_filename(bold_file)[1])
+ out_file = f"{split_filename(bold_file)[1]}.dtseries.nii"
ci.save(img, out_file)
return Path.cwd() / out_file
diff --git a/niworkflows/interfaces/confounds.py b/niworkflows/interfaces/confounds.py
index 0a7e7c1ac7b..bcaf6b327f6 100644
--- a/niworkflows/interfaces/confounds.py
+++ b/niworkflows/interfaces/confounds.py
@@ -319,7 +319,7 @@ def spike_regressors(
spikes = np.zeros((max(indices) + 1, len(mask)))
for i, m in enumerate(sorted(mask)):
spikes[m, i] = 1
- header = ["{:s}{:02d}".format(header_prefix, vol) for vol in range(len(mask))]
+ header = [f"{header_prefix}{vol:02d}" for vol in range(len(mask))]
spikes = pd.DataFrame(data=spikes, columns=header)
if concatenate:
return pd.concat((data, spikes), axis=1)
@@ -360,7 +360,7 @@ def temporal_derivatives(order, variables, data):
variables_deriv[0] = variables
order = set(order) - set([0])
for o in order:
- variables_deriv[o] = ["{}_derivative{}".format(v, o) for v in variables]
+ variables_deriv[o] = [f"{v}_derivative{o}" for v in variables]
data_deriv[o] = np.tile(np.nan, data[variables].shape)
data_deriv[o][o:, :] = np.diff(data[variables], n=o, axis=0)
variables_deriv = reduce(operator.add, variables_deriv.values())
@@ -403,7 +403,7 @@ def exponential_terms(order, variables, data):
variables_exp[1] = variables
order = set(order) - set([1])
for o in order:
- variables_exp[o] = ["{}_power{}".format(v, o) for v in variables]
+ variables_exp[o] = [f"{v}_power{o}" for v in variables]
data_exp[o] = data[variables] ** o
variables_exp = reduce(operator.add, variables_exp.values())
data_exp = pd.DataFrame(
diff --git a/niworkflows/interfaces/fixes.py b/niworkflows/interfaces/fixes.py
index f9aef937281..a94348af55e 100644
--- a/niworkflows/interfaces/fixes.py
+++ b/niworkflows/interfaces/fixes.py
@@ -68,7 +68,7 @@ def _run_interface(self, runtime, correct_return_codes=(0,)):
_copyxform(
self.inputs.reference_image,
os.path.abspath(self._gen_filename("output_image")),
- message="%s (niworkflows v%s)" % (self.__class__.__name__, __version__),
+ message=f"{self.__class__.__name__} (niworkflows v{__version__})",
)
return runtime
@@ -110,7 +110,7 @@ def _run_interface(self, runtime, correct_return_codes=(0,)):
_copyxform(
self.inputs.fixed_image[0],
os.path.abspath(out_file),
- message="%s (niworkflows v%s)" % (self.__class__.__name__, __version__),
+ message=f"{self.__class__.__name__} (niworkflows v{__version__})",
)
# Inverse transform
@@ -119,7 +119,7 @@ def _run_interface(self, runtime, correct_return_codes=(0,)):
_copyxform(
self.inputs.moving_image[0],
os.path.abspath(out_file),
- message="%s (niworkflows v%s)" % (self.__class__.__name__, __version__),
+ message=f"{self.__class__.__name__} (niworkflows v{__version__})",
)
return runtime
diff --git a/niworkflows/interfaces/freesurfer.py b/niworkflows/interfaces/freesurfer.py
index 5b31360387f..392bbe01cfb 100644
--- a/niworkflows/interfaces/freesurfer.py
+++ b/niworkflows/interfaces/freesurfer.py
@@ -552,7 +552,7 @@ def medial_wall_to_nan(in_file, subjects_dir, den=None, newpath=None):
if target_subject.startswith("fsaverage"):
cortex = nb.freesurfer.read_label(
os.path.join(
- subjects_dir, target_subject, "label", "{}.cortex.label".format(fn[:2])
+ subjects_dir, target_subject, "label", f"{fn[:2]}.cortex.label"
)
)
medial = np.delete(np.arange(len(func.darrays[0].data)), cortex)
@@ -578,7 +578,7 @@ def mri_info(fname, argument):
import subprocess as sp
import numpy as np
- cmd_info = "mri_info --%s %s" % (argument, fname)
+ cmd_info = f"mri_info --{argument} {fname}"
proc = sp.Popen(cmd_info, stdout=sp.PIPE, shell=True)
data = bytearray(proc.stdout.read())
mstring = np.fromstring(data.decode("utf-8"), sep="\n")
diff --git a/niworkflows/interfaces/header.py b/niworkflows/interfaces/header.py
index 18808ed6a5b..54721604abf 100644
--- a/niworkflows/interfaces/header.py
+++ b/niworkflows/interfaces/header.py
@@ -315,7 +315,7 @@ def _run_interface(self, runtime):
Analyses of this dataset MAY BE INVALID.
"""
- snippet = '<h3 class="elem-title">%s</h3>\n%s\n' % (warning_txt, description)
+ snippet = f'<h3 class="elem-title">{warning_txt}</h3>\n{description}\n'
# Store new file and report
img.to_filename(out_fname)
with open(out_report, "w") as fobj:
diff --git a/niworkflows/interfaces/nibabel.py b/niworkflows/interfaces/nibabel.py
index d7d7546b22d..58766138581 100644
--- a/niworkflows/interfaces/nibabel.py
+++ b/niworkflows/interfaces/nibabel.py
@@ -432,7 +432,7 @@ def _run_interface(self, runtime):
self.inputs.moving_image,
fov_mask=self.inputs.fov_mask,
force_xform_code=self.inputs.xform_code,
- message="%s (niworkflows v%s)" % (self.__class__.__name__, __version__),
+ message=f"{self.__class__.__name__} (niworkflows v{__version__})",
newpath=runtime.cwd,
)
return runtime
diff --git a/niworkflows/interfaces/tests/test_bids.py b/niworkflows/interfaces/tests/test_bids.py
index 365a62356a8..0119aa883e2 100644
--- a/niworkflows/interfaces/tests/test_bids.py
+++ b/niworkflows/interfaces/tests/test_bids.py
@@ -652,7 +652,7 @@ def test_ReadSidecarJSON_connection(testdata_dir, field):
"derivatives, subjects_dir",
[
(os.getenv("FREESURFER_HOME"), "subjects"),
- ("/tmp", "%s/%s" % (os.getenv("FREESURFER_HOME"), "subjects")),
+ ("/tmp", "{}/{}".format(os.getenv("FREESURFER_HOME"), "subjects")),
],
)
def test_fsdir_noaction(derivatives, subjects_dir):
diff --git a/niworkflows/interfaces/utility.py b/niworkflows/interfaces/utility.py
index b0c6684dd5d..1fb96c41f3c 100644
--- a/niworkflows/interfaces/utility.py
+++ b/niworkflows/interfaces/utility.py
@@ -201,13 +201,13 @@ def _check_len(self, name, new):
if name in self._fields:
if isinstance(new, str) or len(new) < 1:
raise ValueError(
- 'Trying to set an invalid value (%s) for input "%s"' % (new, name)
+ f'Trying to set an invalid value ({new}) for input "{name}"'
)
if len(new) != len(self.inputs.keys):
raise ValueError(
- 'Length of value (%s) for input field "%s" does not match '
- "the length of the indexing list." % (new, name)
+ f'Length of value ({new}) for input field "{name}" '
+ "does not match the length of the indexing list."
)
def _run_interface(self, runtime):
diff --git a/niworkflows/reports/core.py b/niworkflows/reports/core.py
index 81c021bada7..331d70fa752 100644
--- a/niworkflows/reports/core.py
+++ b/niworkflows/reports/core.py
@@ -303,7 +303,7 @@ def _load_config(self, config):
self.out_dir = self.out_dir / self.packagename
if self.subject_id is not None:
- self.root = self.root / "sub-{}".format(self.subject_id)
+ self.root = self.root / f"sub-{self.subject_id}"
if "template_path" in settings:
self.template_path = config.parent / settings["template_path"]
@@ -371,7 +371,7 @@ def index(self, config):
# Populate errors section
error_dir = (
- self.out_dir / "sub-{}".format(self.subject_id) / "log" / self.run_uuid
+ self.out_dir / f"sub-{self.subject_id}" / "log" / self.run_uuid
)
if error_dir.is_dir():
from ..utils.misc import read_crashfile
diff --git a/niworkflows/utils/bids.py b/niworkflows/utils/bids.py
index fdcfbd586f3..8866c5e85ea 100644
--- a/niworkflows/utils/bids.py
+++ b/niworkflows/utils/bids.py
@@ -487,4 +487,4 @@ def check_pipeline_version(cvers, data_desc):
desc = json.loads(data_desc.read_text())
dvers = desc.get("PipelineDescription", {}).get("Version", "0+unknown")
if Version(cvers).public != Version(dvers).public:
- return "Previous output generated by version {} found.".format(dvers)
+ return f"Previous output generated by version {dvers} found."
diff --git a/niworkflows/utils/misc.py b/niworkflows/utils/misc.py
index 8b8802a9170..3c0fc798db0 100644
--- a/niworkflows/utils/misc.py
+++ b/niworkflows/utils/misc.py
@@ -115,20 +115,17 @@ def get_template_specs(
tpl_target_path = tf.get(in_template, **template_spec)
if not tpl_target_path:
raise RuntimeError(
- """\
-Could not find template "{0}" with specs={1}. Please revise your template \
-argument.""".format(
- in_template, template_spec
- )
+ f"""\
+Could not find template "{in_template}" with specs={template_spec}. \
+Please revise your template argument."""
)
if isinstance(tpl_target_path, list):
+ tpl_target_path = ", ".join([str(p) for p in tpl_target_path])
raise RuntimeError(
- """\
-The available template modifiers ({0}) did not select a unique template \
-(got "{1}"). Please revise your template argument.""".format(
- template_spec, ", ".join([str(p) for p in tpl_target_path])
- )
+ f"""\
+The available template modifiers ({template_spec}) did not select a unique \
+template (got "{tpl_target_path}"). Please revise your template argument."""
)
return str(tpl_target_path), common_spec
diff --git a/niworkflows/utils/spaces.py b/niworkflows/utils/spaces.py
index 5fddda253a4..aec2337137f 100644
--- a/niworkflows/utils/spaces.py
+++ b/niworkflows/utils/spaces.py
@@ -207,7 +207,8 @@ def fullname(self):
"""
if "cohort" not in self.spec:
return self.space
- return "%s:cohort-%s" % (self.space, self.spec["cohort"])
+ cohort = self.spec["cohort"]
+ return f"{self.space}:cohort-{cohort}"
@property
def legacyname(self):
diff --git a/niworkflows/utils/tests/test_utils.py b/niworkflows/utils/tests/test_utils.py
index 6c0914212a7..c8711505648 100644
--- a/niworkflows/utils/tests/test_utils.py
+++ b/niworkflows/utils/tests/test_utils.py
@@ -35,8 +35,8 @@ def test_copy_gzip(tmpdir):
check_call(["gzip", "-N", str(filepath)])
assert not filepath.exists()
- gzpath1 = "%s/%s" % (tmpdir, "name1.txt.gz")
- gzpath2 = "%s/%s" % (tmpdir, "name2.txt.gz")
+ gzpath1 = f"{tmpdir}/name1.txt.gz"
+ gzpath2 = f"{tmpdir}/name2.txt.gz"
_copy_any(gzpath1, gzpath2)
assert Path(gzpath2).exists()
check_call(["gunzip", "-N", "-f", gzpath2])
diff --git a/niworkflows/viz/plots.py b/niworkflows/viz/plots.py
index a4659ed5b58..0fc5071d6b7 100644
--- a/niworkflows/viz/plots.py
+++ b/niworkflows/viz/plots.py
@@ -761,7 +761,7 @@ def compcor_variance_plot(
if len(metadata_files) == 1:
metadata_sources = ["CompCor"]
else:
- metadata_sources = ["Decomposition {:d}".format(i) for i in range(len(metadata_files))]
+ metadata_sources = [f"Decomposition {i:d}" for i in range(len(metadata_files))]
for file, source in zip(metadata_files, metadata_sources):
metadata[source] = pd.read_csv(str(file), sep=r"\s+")
metadata[source]["source"] = source
@@ -795,10 +795,10 @@ def compcor_variance_plot(
for m, (source, mask) in enumerate(decompositions):
components = metadata[(metadata["mask"] == mask) & (metadata["source"] == source)]
if len([m for s, m in decompositions if s == source]) > 1:
- title_mask = " ({} mask)".format(mask)
+ title_mask = f" ({mask} mask)"
else:
title_mask = ""
- fig_title = "{}{}".format(source, title_mask)
+ fig_title = f"{source}{title_mask}"
ax[m].plot(
np.arange(components.shape[0] + 1),
@@ -819,7 +819,7 @@ def compcor_variance_plot(
+ 1
)
ax[m].axhline(y=100 * thr, color="lightgrey", linewidth=0.25)
- ax[m].axvline(x=varexp[thr], color="C{}".format(i), linewidth=2, linestyle=":")
+ ax[m].axvline(x=varexp[thr], color=f"C{i}", linewidth=2, linestyle=":")
ax[m].text(
0,
100 * thr,
@@ -954,7 +954,7 @@ def confounds_correlation_plot(
)
ax1.set_xlabel("Confound time series")
- ax1.set_ylabel("Magnitude of correlation with {}".format(reference))
+ ax1.set_ylabel(f"Magnitude of correlation with {reference}")
ax1.tick_params(axis="x", which="both", width=0)
ax1.tick_params(axis="y", which="both", width=5, length=5)
diff --git a/niworkflows/viz/utils.py b/niworkflows/viz/utils.py
index c3ba7e668d9..5db238fdd82 100644
--- a/niworkflows/viz/utils.py
+++ b/niworkflows/viz/utils.py
@@ -269,7 +269,7 @@ def plot_segs(
image_nii, segs=seg_niis, compress=compress, **plot_params
)
# Find and replace the figure_1 id.
- svg = svg.replace("figure_1", "segmentation-%s-%s" % (d, uuid4()), 1)
+ svg = svg.replace("figure_1", f"segmentation-{d}-{uuid4()}", 1)
out_files.append(fromstring(svg))
return out_files
@@ -389,7 +389,7 @@ def plot_registration(
display.close()
# Find and replace the figure_1 id.
- svg = svg.replace("figure_1", "%s-%s-%s" % (div_id, mode, uuid4()), 1)
+ svg = svg.replace("figure_1", f"{div_id}-{mode}-{uuid4()}", 1)
out_files.append(fromstring(svg))
return out_files
@@ -631,7 +631,7 @@ def plot_melodic_components(
if noise_components.size == n_components:
ncomps = "ALL"
ax.annotate(
- "WARNING: {} components were classified as noise".format(ncomps),
+ f"WARNING: {ncomps} components were classified as noise",
xy=(0.0, 0.5),
xycoords="axes fraction",
xytext=(0.01, 0.5),