Skip to content

Commit

Permalink
Use f-strings, batch 1
Browse files Browse the repository at this point in the history
They are faster and usually more readable.
  • Loading branch information
DimitriPapadopoulos committed Nov 20, 2023
1 parent 963a044 commit a825b9d
Show file tree
Hide file tree
Showing 13 changed files with 131 additions and 159 deletions.
24 changes: 12 additions & 12 deletions capsul/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
_version_extra = version_extra

# Expected by setup.py: string of form "X.Y.Z"
__version__ = "{0}.{1}.{2}".format(version_major, version_minor, version_micro)
__version__ = f"{version_major}.{version_minor}.{version_micro}"

brainvisa_dependencies = [
"soma-base",
Expand Down Expand Up @@ -83,25 +83,25 @@
PROVIDES = ["capsul"]
REQUIRES = [
"redis <4.5.0",
"pydantic >={0}".format(PYDANTIC_MIN_VERSION),
"soma-base >={0}".format(SOMA_MIN_VERSION),
"soma-workflow >={0}".format(SOMA_WORKFLOW_MIN_VERSION),
"populse-db >={0}".format(POPULSE_DB_MIN_VERSION),
f"pydantic >={PYDANTIC_MIN_VERSION}",
f"soma-base >={SOMA_MIN_VERSION}",
f"soma-workflow >={SOMA_WORKFLOW_MIN_VERSION}",
f"populse-db >={POPULSE_DB_MIN_VERSION}",
"PyYAML",
]
EXTRA_REQUIRES = {
"test": ["pytest", "jupyter"],
"doc": [
"sphinx >=1.0",
"numpy >={0}".format(NUMPY_MIN_VERSION),
f"numpy >={NUMPY_MIN_VERSION}",
],
"nipype": [
"traits >={}".format(TRAITS_MIN_VERSION),
"numpy >={0}".format(NUMPY_MIN_VERSION),
"scipy >={0}".format(SCIPY_MIN_VERSION),
"nibabel >={0}".format(NIBABEL_MIN_VERSION),
"networkx >={0}".format(NETWORKX_MIN_VERSION),
"nipype =={0}".format(NIPYPE_VERSION),
f"traits >={TRAITS_MIN_VERSION}",
f"numpy >={NUMPY_MIN_VERSION}",
f"scipy >={SCIPY_MIN_VERSION}",
f"nibabel >={NIBABEL_MIN_VERSION}",
f"networkx >={NETWORKX_MIN_VERSION}",
f"nipype =={NIPYPE_VERSION}",
],
}

Expand Down
54 changes: 19 additions & 35 deletions capsul/pipeline/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -399,8 +399,7 @@ def add_process(
# Check the unicity of the name we want to insert
if name in self.nodes:
raise ValueError(
"Pipeline cannot have two nodes with the "
"same name : {0}".format(name)
f"Pipeline cannot have two nodes with the same name : {name}"
)

if skip_invalid:
Expand Down Expand Up @@ -464,22 +463,12 @@ def remove_node(self, node_name):
if not plug.output:
for link_def in list(plug.links_from):
src_node, src_plug = link_def[:2]
link_descr = "%s.%s->%s.%s" % (
src_node,
src_plug,
node_name,
plug_name,
)
link_descr = f"{src_node}.{src_plug}->{node_name}.{plug_name}"
self.remove_link(link_descr)
else:
for link_def in list(plug.links_to):
dst_node, dst_plug = link_def[:2]
link_descr = "%s.%s->%s.%s" % (
node_name,
plug_name,
dst_node,
dst_plug,
)
link_descr = f"{node_name}.{plug_name}->{dst_node}.{dst_plug}"
self.remove_link(link_descr)
del self.nodes[node_name]
self.nodes_activation.on_attribute_change.remove(
Expand Down Expand Up @@ -675,7 +664,7 @@ def add_switch(
# Check the unicity of the name we want to insert
if name in self.nodes:
raise ValueError(
"Pipeline cannot have two nodes with the same " "name: {0}".format(name)
f"Pipeline cannot have two nodes with the same name: {name}"
)

# Create the node
Expand Down Expand Up @@ -864,7 +853,7 @@ def parse_parameter(self, name, check=True):
node = None
plug = None
else:
raise ValueError("{0} is not a valid node name".format(node_name))
raise ValueError(f"{node_name} is not a valid node name")
plug_name = name[dot + 1 :]

# Check if plug exists
Expand All @@ -884,11 +873,10 @@ def parse_parameter(self, name, check=True):
node.invalid_plugs.add(plug_name)
break
if err and check:
node_name = node_name or "pipeline"
raise ValueError(
"'{0}' is not a valid parameter name for "
"node '{1}'".format(
plug_name, (node_name if node_name else "pipeline")
)
f"'{node_name}' is not a valid parameter name for "
f"node '{node_name}'"
)
else:
plug = node.plugs[plug_name]
Expand Down Expand Up @@ -968,16 +956,16 @@ def add_link(self, link, weak_link=False, allow_export=False):

# Assure that pipeline plugs are not linked
if not source_plug.output and source_node is not self:
raise ValueError("Cannot link from an input plug: {0}".format(link))
raise ValueError(f"Cannot link from an input plug: {link}")
if source_plug.output and source_node is self:
raise ValueError(
"Cannot link from a pipeline output " "plug: {0}".format(link)
f"Cannot link from a pipeline output plug: {link}"
)
if dest_plug.output and dest_node is not self:
raise ValueError("Cannot link to an output plug: {0}".format(link))
raise ValueError(f"Cannot link to an output plug: {link}")
if not dest_plug.output and dest_node is self:
raise ValueError(
"Cannot link to a pipeline input " "plug: {0}".format(link)
f"Cannot link to a pipeline input plug: {link}"
)

# Propagate the plug value from source to destination
Expand Down Expand Up @@ -1930,10 +1918,10 @@ def pipeline_state(self):
)
plugs_list.append((plug_name, plug_dict))
for nn, pn, n, p, weak_link in plug.links_to:
link_name = "%s:%s" % (n.full_name, pn)
link_name = f"{n.full_name}:{pn}"
links_to_dict[link_name] = weak_link
for nn, pn, n, p, weak_link in plug.links_from:
link_name = "%s:%s" % (n.full_name, pn)
link_name = f"{n.full_name}:{pn}"
links_from_dict[link_name] = weak_link
return result

Expand All @@ -1952,28 +1940,24 @@ def compare_to_state(self, pipeline_state):
def compare_dict(ref_dict, other_dict):
for ref_key, ref_value in ref_dict.items():
if ref_key not in other_dict:
yield "%s = %s is missing" % (ref_key, repr(ref_value))
yield f"{ref_key} = {ref_value!r} is missing"
else:
other_value = other_dict.pop(ref_key)
if ref_value != other_value:
yield "%s = %s differs from %s" % (
ref_key,
repr(ref_value),
repr(other_value),
)
yield f"{ref_key} = {ref_value!r} differs from {other_value!r}"
for other_key, other_value in other_dict.items():
yield "%s=%s is new" % (other_key, repr(other_value))
yield f"{other_key}={other_value!r} is new"

pipeline_state = deepcopy(pipeline_state)
for node in self.all_nodes():
node_name = node.full_name
node_dict = pipeline_state.pop(node_name, None)
if node_dict is None:
result.append('node "%s" is missing' % node_name)
result.append(f'node "{node_name}" is missing')
else:
plugs_list = OrderedDict(node_dict.pop("plugs"))
result.extend(
'in node "%s": %s' % (node_name, i)
f'in node "{node_name}": {i}'
for i in compare_dict(
dict(
name=node.name,
Expand Down
4 changes: 2 additions & 2 deletions capsul/pipeline/pipeline_nodes.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ def __init__(
raise Exception(
"The Switch node input and output parameters "
"are inconsistent: expect list, "
"got {0}, {1}".format(type(inputs), type(outputs))
f"got {type(inputs)}, {type(outputs)}"
)

# private copy of outputs and inputs
Expand All @@ -147,7 +147,7 @@ def __init__(
for switch_name in inputs:
flat_inputs.extend(
[
"{0}_switch_{1}".format(switch_name, plug_name)
f"{switch_name}_switch_{plug_name}"
for plug_name in outputs
]
)
Expand Down
12 changes: 6 additions & 6 deletions capsul/pipeline/pipeline_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -524,13 +524,13 @@ def _str_repr(item):
)
if len(props) != 0:
attstr = " " + attstr
fileobj.write(' %s [label="%s" style="filled"%s];\n' % (id, node, attstr))
fileobj.write(f' {id} [label="{node}" style="filled"{attstr}];\n')
for edge, descr in dot_graph[1].items():
props = descr[0]
attstr = " ".join(
["=".join([aname, _str_repr(val)]) for aname, val in props.items()]
)
fileobj.write(' "%s" -> "%s" [%s];\n' % (edge[0], edge[1], attstr))
fileobj.write(f' "{edge[0]}" -> "{edge[1]}" [{attstr}];\n')
fileobj.write("}\n")


Expand Down Expand Up @@ -734,7 +734,7 @@ def nodes_with_existing_outputs(
process = node
if recursive and isinstance(process, Pipeline):
nodes += [
("%s.%s" % (node_name, new_name), new_node)
(f"{node_name}.{new_name}", new_node)
for new_name, new_node in process.nodes.items()
if new_name != ""
]
Expand Down Expand Up @@ -1392,11 +1392,11 @@ def __repr__(self):
meta["optional"] = True
meta_str = ""
if meta:
meta_str = ", ".join("%s=%s" % (k, repr(v)) for k, v in meta.items())
meta_str = ", ".join(f"{k}={v!r}" for k, v in meta.items())
meta_str = ", " + meta_str
f.write(' self.add_field("%s", %s%s)\n' % (name, t_str, meta_str))
f.write(f' self.add_field("{name}", {t_str}{meta_str})\n')
if value is not undefined:
f.write(" self.%s = %s\n" % (name, repr(value)))
f.write(f" self.{name} = {value!r}\n")

f.write(
"""
Expand Down
2 changes: 1 addition & 1 deletion capsul/pipeline/process_iteration.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ def iteration_size(self):
raise ValueError(
"Iterative parameter values must be lists of the same size: %s"
% "\n".join(
"%s=%s" % (n, len(getattr(self, n)))
f"{n}={len(getattr(self, n))}"
for n in self.iterative_parameters
if getattr(self, n) is not undefined
)
Expand Down
Loading

0 comments on commit a825b9d

Please sign in to comment.