From 6946bdadeb3e1784eeba8cce8e963b88a79f4b02 Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Sun, 19 Nov 2023 23:16:02 +0100
Subject: [PATCH 1/2] Use f-strings, batch 1

They are faster and usually more readable.
---
 capsul/info.py                                     | 24 +++---
 capsul/pipeline/pipeline.py                        | 54 +++++--------
 capsul/pipeline/pipeline_nodes.py                  |  4 +-
 capsul/pipeline/pipeline_tools.py                  | 12 +--
 capsul/pipeline/process_iteration.py               |  2 +-
 capsul/pipeline/python_export.py                   | 76 +++++++++----------
 .../test/test_complex_pipeline_activations.py      |  8 +-
 capsul/pipeline/topological_sort.py                | 12 +--
 capsul/qt_apps/utils/application.py                |  2 +-
 capsul/qt_apps/utils/find_pipelines.py             | 10 +--
 capsul/qt_gui/widgets/activation_inspector.py      | 21 +++--
 .../qt_gui/widgets/pipeline_developer_view.py      | 13 ++--
 capsul/schemas/brainvisa.py                        | 52 ++++++------
 13 files changed, 131 insertions(+), 159 deletions(-)

diff --git a/capsul/info.py b/capsul/info.py
index 42cf8c81a..99b20ba98 100644
--- a/capsul/info.py
+++ b/capsul/info.py
@@ -18,7 +18,7 @@
 _version_extra = version_extra

 # Expected by setup.py: string of form "X.Y.Z"
-__version__ = "{0}.{1}.{2}".format(version_major, version_minor, version_micro)
+__version__ = f"{version_major}.{version_minor}.{version_micro}"

 brainvisa_dependencies = [
     "soma-base",
@@ -83,25 +83,25 @@
 PROVIDES = ["capsul"]
 REQUIRES = [
     "redis <4.5.0",
-    "pydantic >={0}".format(PYDANTIC_MIN_VERSION),
-    "soma-base >={0}".format(SOMA_MIN_VERSION),
-    "soma-workflow >={0}".format(SOMA_WORKFLOW_MIN_VERSION),
-    "populse-db >={0}".format(POPULSE_DB_MIN_VERSION),
+    f"pydantic >={PYDANTIC_MIN_VERSION}",
+    f"soma-base >={SOMA_MIN_VERSION}",
+    f"soma-workflow >={SOMA_WORKFLOW_MIN_VERSION}",
+    f"populse-db >={POPULSE_DB_MIN_VERSION}",
     "PyYAML",
 ]
 EXTRA_REQUIRES = {
     "test": ["pytest", "jupyter"],
     "doc": [
         "sphinx >=1.0",
-        "numpy >={0}".format(NUMPY_MIN_VERSION),
+        f"numpy >={NUMPY_MIN_VERSION}",
     ],
     "nipype": [
-        "traits >={}".format(TRAITS_MIN_VERSION),
-        "numpy >={0}".format(NUMPY_MIN_VERSION),
-        "scipy >={0}".format(SCIPY_MIN_VERSION),
-        "nibabel >={0}".format(NIBABEL_MIN_VERSION),
-        "networkx >={0}".format(NETWORKX_MIN_VERSION),
-        "nipype =={0}".format(NIPYPE_VERSION),
+        f"traits >={TRAITS_MIN_VERSION}",
+        f"numpy >={NUMPY_MIN_VERSION}",
+        f"scipy >={SCIPY_MIN_VERSION}",
+        f"nibabel >={NIBABEL_MIN_VERSION}",
+        f"networkx >={NETWORKX_MIN_VERSION}",
+        f"nipype =={NIPYPE_VERSION}",
     ],
 }

diff --git a/capsul/pipeline/pipeline.py b/capsul/pipeline/pipeline.py
index ccc7c31c0..67d5fb422 100644
--- a/capsul/pipeline/pipeline.py
+++ b/capsul/pipeline/pipeline.py
@@ -399,8 +399,7 @@ def add_process(
         # Check the unicity of the name we want to insert
         if name in self.nodes:
             raise ValueError(
-                "Pipeline cannot have two nodes with the "
-                "same name : {0}".format(name)
+                f"Pipeline cannot have two nodes with the same name: {name}"
             )

         if skip_invalid:
@@ -464,22 +463,12 @@ def remove_node(self, node_name):
             if not plug.output:
                 for link_def in list(plug.links_from):
                     src_node, src_plug = link_def[:2]
-                    link_descr = "%s.%s->%s.%s" % (
-                        src_node,
-                        src_plug,
-                        node_name,
-                        plug_name,
-                    )
+                    link_descr = f"{src_node}.{src_plug}->{node_name}.{plug_name}"
                     self.remove_link(link_descr)
             else:
                 for link_def in list(plug.links_to):
                     dst_node, dst_plug = link_def[:2]
-                    link_descr = "%s.%s->%s.%s" % (
-                        node_name,
-                        plug_name,
-                        dst_node,
-                        dst_plug,
-                    )
+                    link_descr = f"{node_name}.{plug_name}->{dst_node}.{dst_plug}"
                     self.remove_link(link_descr)
         del self.nodes[node_name]
         self.nodes_activation.on_attribute_change.remove(
@@ -675,7 +664,7 @@ def add_switch(
         # Check the unicity of the name we want to insert
         if name in self.nodes:
             raise ValueError(
-                "Pipeline cannot have two nodes with the same " "name: {0}".format(name)
+                f"Pipeline cannot have two nodes with the same name: {name}"
             )

         # Create the node
@@ -864,7 +853,7 @@ def parse_parameter(self, name, check=True):
                 node = None
                 plug = None
             else:
-                raise ValueError("{0} is not a valid node name".format(node_name))
+                raise ValueError(f"{node_name} is not a valid node name")
             plug_name = name[dot + 1 :]

         # Check if plug nexists
@@ -884,11 +873,10 @@ def parse_parameter(self, name, check=True):
                         node.invalid_plugs.add(plug_name)
                         break
             if err and check:
+                node_name = node_name or "pipeline"
                 raise ValueError(
-                    "'{0}' is not a valid parameter name for "
-                    "node '{1}'".format(
-                        plug_name, (node_name if node_name else "pipeline")
-                    )
+                    f"'{plug_name}' is not a valid parameter name for "
+                    f"node '{node_name}'"
                 )
             else:
                 plug = node.plugs[plug_name]
@@ -968,16 +956,16 @@ def add_link(self, link, weak_link=False, allow_export=False):

         # Assure that pipeline plugs are not linked
         if not source_plug.output and source_node is not self:
-            raise ValueError("Cannot link from an input plug: {0}".format(link))
+            raise ValueError(f"Cannot link from an input plug: {link}")
         if source_plug.output and source_node is self:
             raise ValueError(
-                "Cannot link from a pipeline output " "plug: {0}".format(link)
+                f"Cannot link from a pipeline output plug: {link}"
             )
         if dest_plug.output and dest_node is not self:
-            raise ValueError("Cannot link to an output plug: {0}".format(link))
+            raise ValueError(f"Cannot link to an output plug: {link}")
         if not dest_plug.output and dest_node is self:
             raise ValueError(
-                "Cannot link to a pipeline input " "plug: {0}".format(link)
+                f"Cannot link to a pipeline input plug: {link}"
             )

         # Propagate the plug value from source to destination
@@ -1930,10 +1918,10 @@ def pipeline_state(self):
                 )
                 plugs_list.append((plug_name, plug_dict))
                 for nn, pn, n, p, weak_link in plug.links_to:
-                    link_name = "%s:%s" % (n.full_name, pn)
+                    link_name = f"{n.full_name}:{pn}"
                     links_to_dict[link_name] = weak_link
                 for nn, pn, n, p, weak_link in plug.links_from:
-                    link_name = "%s:%s" % (n.full_name, pn)
+                    link_name = f"{n.full_name}:{pn}"
                     links_from_dict[link_name] = weak_link

         return result
@@ -1952,28 +1940,24 @@ def compare_to_state(self, pipeline_state):
        def compare_dict(ref_dict, other_dict):
            for ref_key, ref_value in ref_dict.items():
                if ref_key not in other_dict:
-                    yield "%s = %s is missing" % (ref_key, repr(ref_value))
+                    yield f"{ref_key} = {ref_value!r} is missing"
                else:
                    other_value = other_dict.pop(ref_key)
                    if ref_value != other_value:
-                        yield "%s = %s differs from %s" % (
-                            ref_key,
-                            repr(ref_value),
-                            repr(other_value),
-                        )
+                        yield f"{ref_key} = {ref_value!r} differs from {other_value!r}"
            for other_key, other_value in other_dict.items():
-                yield "%s=%s is new" % (other_key, repr(other_value))
+                yield f"{other_key}={other_value!r} is new"

        pipeline_state = deepcopy(pipeline_state)
        for node in self.all_nodes():
            node_name = node.full_name
            node_dict = pipeline_state.pop(node_name, None)
            if node_dict is None:
-                result.append('node "%s" is missing' % node_name)
+                result.append(f'node "{node_name}" is missing')
            else:
                plugs_list = OrderedDict(node_dict.pop("plugs"))
                result.extend(
-                    'in node "%s": %s' % (node_name, i)
+                    f'in node "{node_name}": {i}'
                    for i in compare_dict(
                        dict(
                            name=node.name,
diff --git a/capsul/pipeline/pipeline_nodes.py b/capsul/pipeline/pipeline_nodes.py
index 5e1516bd0..b58af6d22 100644
--- a/capsul/pipeline/pipeline_nodes.py
+++ b/capsul/pipeline/pipeline_nodes.py
@@ -135,7 +135,7 @@ def __init__(
             raise Exception(
                 "The Switch node input and output parameters "
                 "are inconsistent: expect list, "
-                "got {0}, {1}".format(type(inputs), type(outputs))
+                f"got {type(inputs)}, {type(outputs)}"
             )

         # private copy of outputs and inputs
@@ -147,7 +147,7 @@ def __init__(
         for switch_name in inputs:
             flat_inputs.extend(
                 [
-                    "{0}_switch_{1}".format(switch_name, plug_name)
+                    f"{switch_name}_switch_{plug_name}"
                     for plug_name in outputs
                 ]
             )
diff --git a/capsul/pipeline/pipeline_tools.py b/capsul/pipeline/pipeline_tools.py
index f064f612d..87d6b9d54 100644
--- a/capsul/pipeline/pipeline_tools.py
+++ b/capsul/pipeline/pipeline_tools.py
@@ -524,13 +524,13 @@ def _str_repr(item):
         )
         if len(props) != 0:
             attstr = " " + attstr
-        fileobj.write('  %s [label="%s" style="filled"%s];\n' % (id, node, attstr))
+        fileobj.write(f'  {id} [label="{node}" style="filled"{attstr}];\n')

     for edge, descr in dot_graph[1].items():
         props = descr[0]
         attstr = " ".join(
             ["=".join([aname, _str_repr(val)]) for aname, val in props.items()]
         )
-        fileobj.write('  "%s" -> "%s" [%s];\n' % (edge[0], edge[1], attstr))
+        fileobj.write(f'  "{edge[0]}" -> "{edge[1]}" [{attstr}];\n')
     fileobj.write("}\n")
@@ -734,7 +734,7 @@ def nodes_with_existing_outputs(
         process = node
         if recursive and isinstance(process, Pipeline):
             nodes += [
-                ("%s.%s" % (node_name, new_name), new_node)
+                (f"{node_name}.{new_name}", new_node)
                 for new_name, new_node in process.nodes.items()
                 if new_name != ""
             ]
@@ -1392,11 +1392,11 @@ def __repr__(self):
                 meta["optional"] = True
             meta_str = ""
             if meta:
-                meta_str = ", ".join("%s=%s" % (k, repr(v)) for k, v in meta.items())
+                meta_str = ", ".join(f"{k}={v!r}" for k, v in meta.items())
                 meta_str = ", " + meta_str
-            f.write('        self.add_field("%s", %s%s)\n' % (name, t_str, meta_str))
+            f.write(f'        self.add_field("{name}", {t_str}{meta_str})\n')
             if value is not undefined:
-                f.write("        self.%s = %s\n" % (name, repr(value)))
+                f.write(f"        self.{name} = {value!r}\n")

         f.write(
             """
diff --git a/capsul/pipeline/process_iteration.py b/capsul/pipeline/process_iteration.py
index 5b40f1702..a16a654a7 100644
--- a/capsul/pipeline/process_iteration.py
+++ b/capsul/pipeline/process_iteration.py
@@ -133,7 +133,7 @@ def iteration_size(self):
             raise ValueError(
                 "Iterative parameter values must be lists of the same size: %s"
                 % "\n".join(
-                    "%s=%s" % (n, len(getattr(self, n)))
+                    f"{n}={len(getattr(self, n))}"
                     for n in self.iterative_parameters
                     if getattr(self, n) is not undefined
                 )
diff --git a/capsul/pipeline/python_export.py b/capsul/pipeline/python_export.py
index 28e8afab4..304eae5fd 100644
--- a/capsul/pipeline/python_export.py
+++ b/capsul/pipeline/python_export.py
@@ -66,21 +66,21 @@ def _write_process(process, pyf, name, enabled, skip_invalid):
             make_opt.append(fname)
     node_options = ""
     if len(make_opt) != 0:
-        node_options += ", make_optional=%s" % repr(make_opt)
+        node_options += f", make_optional={make_opt!r}"
     if skip_invalid:
         node_options += ", skip_invalid=True"
     print(
-        '        self.add_process("%s", "%s"%s)' % (name, procname, node_options),
+        f'        self.add_process("{name}", "{procname}"{node_options})',
         file=pyf,
     )

     # check that sub-nodes enable and plugs optional states are the
     # expected ones
-    todo = [('self.nodes["%s"]' % name, process, proc_copy)]
+    todo = [(f'self.nodes["{name}"]', process, proc_copy)]
     while todo:
         self_str, snode, cnode = todo.pop(0)
         if not snode.enabled:
- print(" %s.enabled = False" % self_str, file=pyf) + print(f" {self_str}.enabled = False", file=pyf) # if the node is a (sub)pipeline, and this pipeline has additional # exported traits compared to the its base module/class instance @@ -178,20 +178,18 @@ def _write_custom_node(node, pyf, name, enabled): (p, v) for p, v in c.asdict().items() if v not in (None, undefined) ) print( - ' self.add_custom_node("%s", "%s", %s)' - % (name, nodename, get_repr_value(params)), + f' self.add_custom_node("{name}", "{nodename}", {get_repr_value(params)})', file=pyf, ) else: print( - ' self.add_custom_node("%s", "%s")' % (name, nodename), file=pyf + f' self.add_custom_node("{name}", "{nodename}")', file=pyf ) # optional plugs for plug_name, plug in node.plugs.items(): if plug.optional: print( - ' self.nodes["%s"].plugs["%s"].optional = True' - % (name, plug_name), + f' self.nodes["{name}"].plugs["{plug_name}"].optional = True', file=pyf, ) # non-default: values of unconnected plugs @@ -205,8 +203,7 @@ def _write_custom_node(node, pyf, name, enabled): ): value = getattr(node, plug_name, undefined) print( - ' self.nodes["%s"].%s = %s' - % (name, plug_name, get_repr_value(value)), + f' self.nodes["{name}"].{plug_name} = {get_repr_value(value)}', file=pyf, ) @@ -229,8 +226,8 @@ def _write_iteration(process_iter, pyf, name, enabled): iteration_params = ", ".join(process_iter.iterative_parameters) # TODO: optional plugs, non-exported plugs... print( - ' self.add_iterative_process("%s", "%s", ' - "iterative_plugs=%s)" % (name, procname, process_iter.iterative_parameters), + f' self.add_iterative_process("{name}", "{procname}", ' + f"iterative_plugs={process_iter.iterative_parameters})", file=pyf, ) @@ -265,12 +262,16 @@ def _write_switch(switch, pyf, name): inputs.append(name_parts[0]) optional_p = "" if len(optional) != 0: - optional_p = ", make_optional=%s" % repr(optional) + optional_p = f", make_optional={optional!r}" - # print(f' self.create_switch({repr(name)}, {repr(options)}, switch_value={repr(switch.switch)}, export_switch=False)', file=pyf) + # print( + # f' self.create_switch({repr(name)}, {repr(options)}, ' + # f'switch_value={repr(switch.switch)}, export_switch=False)', + # file=pyf + # ) print( - f" self.add_switch({repr(name)}, {repr(inputs)}, " - f"{repr(outputs)}{optional_p}, export_switch=False)", + f" self.add_switch({name!r}, {inputs!r}, " + f"{outputs!r}{optional_p}, export_switch=False)", file=pyf, ) @@ -309,8 +310,7 @@ def _write_processes_selections(pipeline, pyf): print("\n # processes selection", file=pyf) for selector_name, groups in pipeline.processes_selection.items(): print( - ' self.add_processes_selection("%s", %s)' - % (selector_name, repr(groups)), + f' self.add_processes_selection("{selector_name}", {groups!r})', file=pyf, ) return selection_parameters @@ -326,14 +326,13 @@ def _write_export(pipeline, pyf, param_name): if param_name == plug_name: param_name = "" else: - param_name = ', "%s"' % param_name + param_name = f', "{param_name}"' weak_link = "" if link[-1]: weak_link = ", weak_link=True" - is_optional = ", is_optional=%s" % repr(field.optional) + is_optional = f", is_optional={field.optional!r}" print( - ' self.export_parameter("%s", "%s"%s%s%s)' - % (node_name, plug_name, param_name, weak_link, is_optional), + f' self.export_parameter("{node_name}", "{plug_name}"{param_name}{weak_link}{is_optional})', file=pyf, ) return node_name, plug_name @@ -355,14 +354,14 @@ def _write_links(pipeline, pyf): exported_plug = _write_export(pipeline, pyf, src) exported.add(src) 
             else:
-                src = "%s.%s" % (node_name, plug_name)
+                src = f"{node_name}.{plug_name}"
             if link[0] == "":
                 dst = link[1]
                 if dst not in exported:
                     exported_plug = _write_export(pipeline, pyf, dst)
                     exported.add(dst)
             else:
-                dst = "%s.%s" % (link[0], link[1])
+                dst = f"{link[0]}.{link[1]}"
             if not exported_plug or ".".join(exported_plug) not in (
                 src,
                 dst,
             ):
                 weak_link = ""
                 if link[-1]:
                     weak_link = ", weak_link=True"
                 print(
-                    '        self.add_link("%s->%s"%s)'
-                    % (src, dst, weak_link),
+                    f'        self.add_link("{src}->{dst}"{weak_link})',
                     file=pyf,
                 )
@@ -382,7 +380,7 @@ def _write_param_order(pipeline, pyf):
         return
     print("\n        # parameters order", file=pyf)
     names = [
-        '"%s"' % n.name
+        f'"{n.name}"'
         for n in user_fields
         if n.name not in ("nodes_activation", "pipeline_steps", "visible_groups")
     ]
@@ -403,8 +401,7 @@ def _write_steps(pipeline, pyf):
             enabled_str = ", enabled=false"
         nodes = step.metadata("nodes", set())
         print(
-            '        self.add_pipeline_step("%s", %s%s)'
-            % (step_name, repr(nodes), enabled_str),
+            f'        self.add_pipeline_step("{step_name}", {nodes!r}{enabled_str})',
            file=pyf,
        )
@@ -417,12 +414,11 @@ def _write_nodes_positions(pipeline, pyf):
         if not isinstance(pos, (list, tuple)):
             # pos is probably a QPointF
             pos = (pos.x(), pos.y())
-        print('            "%s": %s,' % (node_name, repr(pos)), file=pyf)
+        print(f'            "{node_name}": {pos!r},', file=pyf)
     print("        }", file=pyf)

     if hasattr(pipeline, "scene_scale_factor"):
         print(
-            "        self.scene_scale_factor = %s"
-            % repr(pipeline.scene_scale_factor),
+            f"        self.scene_scale_factor = {pipeline.scene_scale_factor!r}",
             file=pyf,
         )
@@ -435,7 +431,7 @@ def _write_nodes_dimensions(pipeline, pyf):
     for node_name, dim in pipeline.node_dimension.items():
         if not isinstance(dim, (list, tuple)):
             dim = (dim.width(), dim.height())
-        print('            "%s": %s,' % (node_name, repr(dim)), file=pyf)
+        print(f'            "{node_name}": {dim!r},', file=pyf)
     print("        }", file=pyf)

     ######################################################
@@ -456,9 +452,7 @@ def _write_doc(pipeline, pyf):
             for i in notepos:
                 if (
                     splitdoc[i + 2].find(
-                        "* Type '{0}.help()'".format(
-                            pipeline.__class__.__name__
-                        )
+                        f"* Type '{pipeline.__class__.__name__}.help()'"
                     )
                     != -1
                 ):
@@ -473,7 +467,7 @@ def _write_doc(pipeline, pyf):
     if docstr:
         doc = docstr.split("\n")
         docstr = "\n".join([repr(x)[1:-1] for x in doc])
-        print('    """%s """' % docstr, file=pyf)
+        print(f'    """{docstr} """', file=pyf)

 def _write_values(pipeline, pyf):
     first = True
@@ -490,12 +484,12 @@ def _write_values(pipeline, pyf):
             try:
                 eval(value_repr)
             except Exception:
-                print("warning, value of parameter %s cannot be saved" % param_name)
+                print(f"warning, value of parameter {param_name} cannot be saved")
                 continue
             if first:
                 first = False
                 print("\n        # default and initial values", file=pyf)
-            print("        self.%s = %s" % (param_name, value_repr), file=pyf)
+            print(f"        self.{param_name} = {value_repr}", file=pyf)

     class_name = type(pipeline).__name__
     if class_name == "Pipeline":
@@ -510,7 +504,7 @@ def _write_values(pipeline, pyf):
     print("from soma.controller import undefined", file=pyf)
     print(file=pyf)
     print(file=pyf)
-    print("class %s(Pipeline):" % class_name, file=pyf)
+    print(f"class {class_name}(Pipeline):", file=pyf)

     _write_doc(pipeline, pyf)
diff --git a/capsul/pipeline/test/test_complex_pipeline_activations.py b/capsul/pipeline/test/test_complex_pipeline_activations.py
index 309724afc..1723f01ed 100644
--- a/capsul/pipeline/test/test_complex_pipeline_activations.py
+++ b/capsul/pipeline/test/test_complex_pipeline_activations.py
@@ -859,9 +859,7 @@ def test_complex_activations(self):
                 node = node_pipeline.nodes[node_name]
             except KeyError:
                 raise KeyError(
-                    "Pipeline {0} has no node named {1}".format(
-                        node_pipeline.pipeline, node_name
-                    )
+                    f"Pipeline {node_pipeline.pipeline} has no node named {node_name}"
                 )
             try:
                 what = "activation of node {0}".format(
@@ -880,9 +878,7 @@ def test_complex_activations(self):
                 self.assertEqual(expected, got)
             except AssertionError:
                 raise AssertionError(
-                    "Wrong activation within ComplexPipeline with parameters {0}: {1} is supposed to be {2} but is {3}".format(
-                        kwargs, what, expected, got
-                    )
+                    f"Wrong activation within ComplexPipeline with parameters {kwargs}: {what} is supposed to be {expected} but is {got}"
                 )

diff --git a/capsul/pipeline/topological_sort.py b/capsul/pipeline/topological_sort.py
index 6bf6be343..e7e5f57a3 100644
--- a/capsul/pipeline/topological_sort.py
+++ b/capsul/pipeline/topological_sort.py
@@ -139,10 +139,10 @@ def add_node(self, node):
         the node to insert
         """
         if not isinstance(node, GraphNode):
-            raise Exception("Expect a GraphNode, got {0}".format(node))
+            raise Exception(f"Expect a GraphNode, got {node}")
         if node.name in self._nodes:
             raise Exception(
-                "Expect a GraphNode with a unique name, " "got {0}".format(node)
+                f"Expect a GraphNode with a unique name, got {node}"
             )
         self._nodes[node.name] = node

@@ -170,13 +170,13 @@ def add_link(self, from_node, to_node):
         """
         if from_node not in self._nodes:
             raise Exception(
-                "Node {0} is not defined in the Graph."
-                "Use add_node() method".format(from_node)
+                f"Node {from_node} is not defined in the Graph. "
+                "Use add_node() method"
             )
         if to_node not in self._nodes:
             raise Exception(
-                "Node {0} is not defined in the Graph."
-                "Use add_node() method".format(to_node)
+                f"Node {to_node} is not defined in the Graph. "
+ "Use add_node() method" ) if (from_node, to_node) not in self._links: self._nodes[to_node].add_link_from(self._nodes[from_node]) diff --git a/capsul/qt_apps/utils/application.py b/capsul/qt_apps/utils/application.py index 2d1096543..adf191435 100644 --- a/capsul/qt_apps/utils/application.py +++ b/capsul/qt_apps/utils/application.py @@ -116,7 +116,7 @@ def __init__(self, extra_options=None): # If a no valid logging level is found raise an Exception if level is None: raise Exception( - "Warning : unknown logging level " "{0}".format(self.options.debug) + f"Warning : unknown logging level {self.options.debug}" ) # Configure the logging module diff --git a/capsul/qt_apps/utils/find_pipelines.py b/capsul/qt_apps/utils/find_pipelines.py index a6f51791e..784ea0674 100644 --- a/capsul/qt_apps/utils/find_pipelines.py +++ b/capsul/qt_apps/utils/find_pipelines.py @@ -49,7 +49,7 @@ def find_pipelines_from_description(module_name, url=None): try: __import__(module_name) except ImportError: - logger.error("Can't load module {0}".format(module_name)) + logger.error("Can't load module %s", module_name) return {}, [] # Get the module path @@ -57,7 +57,7 @@ def find_pipelines_from_description(module_name, url=None): module_path = module.__path__[0] # Build the expected pipeline description file - description_file = os.path.join(module_path, "{0}.capsul".format(module_name)) + description_file = os.path.join(module_path, f"{module_name}.capsul") # Load the description file if os.path.isfile(description_file): @@ -98,7 +98,7 @@ def find_pipeline_and_process(module_name): try: __import__(module_name) except ImportError: - logger.error("Can't load module {0}".format(module_name)) + logger.error(f"Can't load module {module_name}") return {}, [] # Get the module path @@ -112,7 +112,7 @@ def find_pipeline_and_process(module_name): sub_modules = find_packages(where=module_path, exclude=("doc",)) sub_modules = [module_name + "." + x for x in sub_modules] sub_modules.insert(0, module_name) - logger.debug("Modules found with setuptools: '{0}'.".format(sub_modules)) + logger.debug(f"Modules found with setuptools: '{sub_modules}'.") # Shift shift = len(module_name.split(".")) @@ -137,7 +137,7 @@ def find_pipeline_and_process(module_name): except ImportError: exc_info = sys.exc_info() logger.error("".join(traceback.format_exception(*exc_info))) - logger.error("Can't load module " "{0}".format(sub_sub_module_name)) + logger.error(f"Can't load module {sub_sub_module_name}") continue # Get the module diff --git a/capsul/qt_gui/widgets/activation_inspector.py b/capsul/qt_gui/widgets/activation_inspector.py index 96b314fb8..a30685810 100644 --- a/capsul/qt_gui/widgets/activation_inspector.py +++ b/capsul/qt_gui/widgets/activation_inspector.py @@ -160,8 +160,8 @@ def __del__(self): self.pipeline._debug_activations = self.record_file else: raise ValueError( - "The record file '{0}' can't be created since the " - "base directory does not exists.".format(self.record_file) + f"The record file '{self.record_file}' can't be created " + "since the base directory does not exist." 
             )

     # Execute the pipeline activation method
@@ -216,10 +215,9 @@ def refresh_activation_from_record(self):
         record_pipeline_id = openrecord.readline().strip()
         if record_pipeline_id != str(self.pipeline.definition):
             raise ValueError(
-                "'{0}' recorded activations for pipeline '{1}' but not for "
-                "'{2}'".format(
-                    self.record_file, record_pipeline_id, self.pipeline.definition
-                )
+                f"'{self.record_file}' recorded activations for pipeline "
+                f"'{record_pipeline_id}' but not for "
+                f"'{self.pipeline.definition}'"
             )

         # Clear the list where the recorded activation is displayed
@@ -243,14 +242,14 @@ def refresh_activation_from_record(self):
             # > Store the current activation stack
             if activation == "+":
-                current_activations["{0}:{1}".format(node, plug)] = True
+                current_activations[f"{node}:{plug}"] = True
             else:
-                del current_activations["{0}:{1}".format(node, plug)]
+                del current_activations[f"{node}:{plug}"]
             self.activations.append(current_activations.copy())

             # > Add a line to the activation display
             self.ui.events.addItem(
-                "{0}{1} {2}:{3}".format(iteration, activation, node, plug)
+                f"{iteration}{activation} {node}:{plug}"
             )

         # Select the last activation step so the pipeline will be
@@ -272,9 +271,9 @@ def update_pipeline_activation(self, index):
             node_name = node.full_name
             for plug_name, plug in node.plugs.items():
                 plug.activated = activations.get(
-                    "{0}:{1}".format(node_name, plug_name), False
+                    f"{node_name}:{plug_name}", False
                 )
-            node.activated = activations.get("{0}:".format(node_name), False)
+            node.activated = activations.get(f"{node_name}:", False)

         # Refresh views relying on plugs and nodes selection
         for node in self.pipeline.all_nodes():
diff --git a/capsul/qt_gui/widgets/pipeline_developer_view.py b/capsul/qt_gui/widgets/pipeline_developer_view.py
index d0c2c25fe..65710e727 100644
--- a/capsul/qt_gui/widgets/pipeline_developer_view.py
+++ b/capsul/qt_gui/widgets/pipeline_developer_view.py
@@ -377,7 +377,7 @@ def get_title(self):
         if self.sub_pipeline is None:
             return self.name
         else:
-            return "[{0}]".format(self.name)
+            return f"[{self.name}]"

     def update_parameters(self):
         self._update_param_timer.start(20)
@@ -2926,8 +2926,7 @@ def ensure_pipeline(self, pipeline):
             )  # add by Irmage OM
         else:
             raise Exception(
-                "Expect a Pipeline or a Process, not a "
-                "'{0}'.".format(repr(pipeline))
+                f"Expect a Pipeline or a Process, not a '{pipeline!r}'."
             )

         return pipeline
@@ -5248,20 +5247,20 @@ def hinted_tuple_hook(obj):

         if "pipeline_parameters" not in list(dic.keys()):
             raise KeyError(
-                'No "pipeline_parameters" key found in {0}.'.format(filename)
+                f'No "pipeline_parameters" key found in {filename}.'
             )

         for field_name, field_value in dic["pipeline_parameters"].items():
             if field_name not in [
                 field.name for field in self.scene.pipeline.fields()
             ]:
-                print('No "{0}" parameter in pipeline.'.format(field_name))
+                print(f'No "{field_name}" parameter in pipeline.')

             try:
                 setattr(self.scene.pipeline, field_name, field_value)
             except dataclasses.ValidationError:
-                print("Error for the plug {0}".format(field_name))
+                print(f"Error for the plug {field_name}")

         self.scene.pipeline.update_nodes_and_plugs_activation()

@@ -5310,7 +5309,7 @@ def hint_tuples(item):
             msg.setIcon(QMessageBox.Warning)
             msg.setText(
                 'The parameters must be saved in the ".json" format, '
-                'not the "{0}" format'.format(os.path.splitext(filename)[1])
+                f'not the "{os.path.splitext(filename)[1]}" format'
             )
             msg.setWindowTitle("Warning")
             msg.setStandardButtons(QMessageBox.Ok)
diff --git a/capsul/schemas/brainvisa.py b/capsul/schemas/brainvisa.py
index fed1073a4..37c95260e 100644
--- a/capsul/schemas/brainvisa.py
+++ b/capsul/schemas/brainvisa.py
@@ -172,55 +172,55 @@ def declare_morpho_schemas(morpho_module):
     """

     axon_module = morpho_module
-    cnn_module = "{}.sulcideeplabeling".format(morpho_module)
+    cnn_module = f"{morpho_module}.sulcideeplabeling"
     if morpho_module.startswith("morphologist."):
-        axon_module = "{}.axon".format(morpho_module)
+        axon_module = f"{morpho_module}.axon"
         cnn_module = "deepsulci.sulci_labeling.capsul.labeling"

-    morphologist = importlib.import_module("{}.morphologist".format(morpho_module))
+    morphologist = importlib.import_module(f"{morpho_module}.morphologist")
     normalization_t1_spm12_reinit = importlib.import_module(
-        "{}.normalization_t1_spm12_reinit".format(axon_module)
+        f"{axon_module}.normalization_t1_spm12_reinit"
     )
     normalization_t1_spm8_reinit = importlib.import_module(
-        "{}.normalization_t1_spm8_reinit".format(axon_module)
+        f"{axon_module}.normalization_t1_spm8_reinit"
     )
     normalization_aimsmiregister = importlib.import_module(
-        "{}.normalization_aimsmiregister".format(axon_module)
+        f"{axon_module}.normalization_aimsmiregister"
     )
     normalization_fsl_reinit = importlib.import_module(
-        "{}.normalization_fsl_reinit".format(axon_module)
+        f"{axon_module}.normalization_fsl_reinit"
     )
     t1biascorrection = importlib.import_module(
-        "{}.t1biascorrection".format(axon_module)
+        f"{axon_module}.t1biascorrection"
     )
-    histoanalysis = importlib.import_module("{}.histoanalysis".format(axon_module))
+    histoanalysis = importlib.import_module(f"{axon_module}.histoanalysis")
     brainsegmentation = importlib.import_module(
-        "{}.brainsegmentation".format(axon_module)
+        f"{axon_module}.brainsegmentation"
     )
-    skullstripping = importlib.import_module("{}.skullstripping".format(axon_module))
+    skullstripping = importlib.import_module(f"{axon_module}.skullstripping")
-    scalpmesh = importlib.import_module("{}.scalpmesh".format(axon_module))
+    scalpmesh = importlib.import_module(f"{axon_module}.scalpmesh")
-    splitbrain = importlib.import_module("{}.splitbrain".format(axon_module))
+    splitbrain = importlib.import_module(f"{axon_module}.splitbrain")
     greywhiteclassificationhemi = importlib.import_module(
-        "{}.greywhiteclassificationhemi".format(axon_module)
+        f"{axon_module}.greywhiteclassificationhemi"
     )
     greywhitetopology = importlib.import_module(
-        "{}.greywhitetopology".format(axon_module)
+        f"{axon_module}.greywhitetopology"
     )
-    greywhitemesh = importlib.import_module("{}.greywhitemesh".format(axon_module))
+    greywhitemesh = importlib.import_module(f"{axon_module}.greywhitemesh")
-    pialmesh = importlib.import_module("{}.pialmesh".format(axon_module))
+    pialmesh = importlib.import_module(f"{axon_module}.pialmesh")
-    sulciskeleton = importlib.import_module("{}.sulciskeleton".format(axon_module))
+    sulciskeleton = importlib.import_module(f"{axon_module}.sulciskeleton")
-    sulcigraph = importlib.import_module("{}.sulcigraph".format(axon_module))
+    sulcigraph = importlib.import_module(f"{axon_module}.sulcigraph")
     sulcilabellingann = importlib.import_module(
-        "{}.sulcilabellingann".format(axon_module)
+        f"{axon_module}.sulcilabellingann"
     )
     sulcilabellingspamglobal = importlib.import_module(
-        "{}.sulcilabellingspamglobal".format(axon_module)
+        f"{axon_module}.sulcilabellingspamglobal"
     )
     sulcilabellingspamlocal = importlib.import_module(
-        "{}.sulcilabellingspamlocal".format(axon_module)
+        f"{axon_module}.sulcilabellingspamlocal"
     )
     sulcilabellingspammarkov = importlib.import_module(
-        "{}.sulcilabellingspammarkov".format(axon_module)
+        f"{axon_module}.sulcilabellingspammarkov"
     )
     try:
         sulcideeplabeling = importlib.import_module(cnn_module)
@@ -231,10 +231,10 @@ def declare_morpho_schemas(morpho_module):
             sulcideeplabeling = importlib.import_module(cnn_module)
         else:
             raise
-    brainvolumes = importlib.import_module("{}.brainvolumes".format(axon_module))
+    brainvolumes = importlib.import_module(f"{axon_module}.brainvolumes")
-    morpho_report = importlib.import_module("{}.morpho_report".format(axon_module))
+    morpho_report = importlib.import_module(f"{axon_module}.morpho_report")
     sulcigraphmorphometrybysubject = importlib.import_module(
-        "{}.sulcigraphmorphometrybysubject".format(axon_module)
+        f"{axon_module}.sulcigraphmorphometrybysubject"
     )

     # patch processes to setup their requirements and schemas

From 42e3a8379b948881f1453b5a0aa3078f6810c714 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 20 Nov 2023 22:03:12 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 capsul/pipeline/pipeline.py                      |  8 ++------
 capsul/pipeline/pipeline_nodes.py                |  5 +----
 capsul/pipeline/python_export.py                 |  4 +---
 capsul/pipeline/topological_sort.py              | 10 +++-------
 capsul/qt_apps/utils/application.py              |  4 +---
 capsul/qt_gui/widgets/activation_inspector.py    |  8 ++------
 capsul/qt_gui/widgets/pipeline_developer_view.py |  4 +---
 capsul/schemas/brainvisa.py                      | 16 ++++------------
 8 files changed, 15 insertions(+), 44 deletions(-)

diff --git a/capsul/pipeline/pipeline.py b/capsul/pipeline/pipeline.py
index 67d5fb422..47720cfee 100644
--- a/capsul/pipeline/pipeline.py
+++ b/capsul/pipeline/pipeline.py
@@ -958,15 +958,11 @@ def add_link(self, link, weak_link=False, allow_export=False):
         if not source_plug.output and source_node is not self:
             raise ValueError(f"Cannot link from an input plug: {link}")
         if source_plug.output and source_node is self:
-            raise ValueError(
-                f"Cannot link from a pipeline output plug: {link}"
-            )
+            raise ValueError(f"Cannot link from a pipeline output plug: {link}")
         if dest_plug.output and dest_node is not self:
             raise ValueError(f"Cannot link to an output plug: {link}")
         if not dest_plug.output and dest_node is self:
-            raise ValueError(
-                f"Cannot link to a pipeline input plug: {link}"
-            )
+            raise ValueError(f"Cannot link to a pipeline input plug: {link}")

         # Propagate the plug value from source to destination
         value = getattr(source_node, source_plug_name, None)
diff --git a/capsul/pipeline/pipeline_nodes.py b/capsul/pipeline/pipeline_nodes.py
index b58af6d22..247771e1e 100644
--- a/capsul/pipeline/pipeline_nodes.py
+++ b/capsul/pipeline/pipeline_nodes.py
@@ -146,10 +146,7 @@ def __init__(
         flat_inputs = []
         for switch_name in inputs:
             flat_inputs.extend(
-                [
-                    f"{switch_name}_switch_{plug_name}"
-                    for plug_name in outputs
-                ]
+                [f"{switch_name}_switch_{plug_name}" for plug_name in outputs]
             )
         node_inputs = [
             dict(name="switch"),
diff --git a/capsul/pipeline/python_export.py b/capsul/pipeline/python_export.py
index 304eae5fd..9e9d22a19 100644
--- a/capsul/pipeline/python_export.py
+++ b/capsul/pipeline/python_export.py
@@ -182,9 +182,7 @@ def _write_custom_node(node, pyf, name, enabled):
             file=pyf,
         )
     else:
-        print(
-            f'        self.add_custom_node("{name}", "{nodename}")', file=pyf
-        )
+        print(f'        self.add_custom_node("{name}", "{nodename}")', file=pyf)
     # optional plugs
     for plug_name, plug in node.plugs.items():
         if plug.optional:
diff --git a/capsul/pipeline/topological_sort.py b/capsul/pipeline/topological_sort.py
index e7e5f57a3..f1fe205a2 100644
--- a/capsul/pipeline/topological_sort.py
+++ b/capsul/pipeline/topological_sort.py
@@ -141,9 +141,7 @@ def add_node(self, node):
         if not isinstance(node, GraphNode):
             raise Exception(f"Expect a GraphNode, got {node}")
         if node.name in self._nodes:
-            raise Exception(
-                f"Expect a GraphNode with a unique name, got {node}"
-            )
+            raise Exception(f"Expect a GraphNode with a unique name, got {node}")
         self._nodes[node.name] = node

     def find_node(self, node_name):
@@ -170,13 +168,11 @@ def add_link(self, from_node, to_node):
         """
         if from_node not in self._nodes:
             raise Exception(
-                f"Node {from_node} is not defined in the Graph. "
-                "Use add_node() method"
+                f"Node {from_node} is not defined in the Graph. " "Use add_node() method"
             )
         if to_node not in self._nodes:
             raise Exception(
-                f"Node {to_node} is not defined in the Graph. "
-                "Use add_node() method"
+                f"Node {to_node} is not defined in the Graph. " "Use add_node() method"
             )
         if (from_node, to_node) not in self._links:
             self._nodes[to_node].add_link_from(self._nodes[from_node])
diff --git a/capsul/qt_apps/utils/application.py b/capsul/qt_apps/utils/application.py
index adf191435..284f2890f 100644
--- a/capsul/qt_apps/utils/application.py
+++ b/capsul/qt_apps/utils/application.py
@@ -115,9 +115,7 @@ def __init__(self, extra_options=None):

         # If a no valid logging level is found raise an Exception
         if level is None:
-            raise Exception(
-                f"Warning: unknown logging level {self.options.debug}"
-            )
+            raise Exception(f"Warning: unknown logging level {self.options.debug}")

         # Configure the logging module
         logging.basicConfig(level=level, format=logging_format, datefmt=date_format)
diff --git a/capsul/qt_gui/widgets/activation_inspector.py b/capsul/qt_gui/widgets/activation_inspector.py
index a30685810..cccdc0e62 100644
--- a/capsul/qt_gui/widgets/activation_inspector.py
+++ b/capsul/qt_gui/widgets/activation_inspector.py
@@ -248,9 +248,7 @@ def refresh_activation_from_record(self):
             self.activations.append(current_activations.copy())

             # > Add a line to the activation display
-            self.ui.events.addItem(
-                f"{iteration}{activation} {node}:{plug}"
-            )
+            self.ui.events.addItem(f"{iteration}{activation} {node}:{plug}")

         # Select the last activation step so the pipeline will be
         # in his final configuration
@@ -270,9 +268,7 @@ def update_pipeline_activation(self, index):
         # Restore the plugs and nodes activations
         node_name = node.full_name
         for plug_name, plug in node.plugs.items():
-            plug.activated = activations.get(
-                f"{node_name}:{plug_name}", False
-            )
+            plug.activated = activations.get(f"{node_name}:{plug_name}", False)
         node.activated = activations.get(f"{node_name}:", False)

         # Refresh views relying on plugs and nodes selection
diff --git a/capsul/qt_gui/widgets/pipeline_developer_view.py b/capsul/qt_gui/widgets/pipeline_developer_view.py
index 65710e727..1c234e7c4 100644
--- a/capsul/qt_gui/widgets/pipeline_developer_view.py
+++ b/capsul/qt_gui/widgets/pipeline_developer_view.py
@@ -5246,9 +5246,7 @@ def hinted_tuple_hook(obj):
         dic = json.loads(dic, object_hook=hinted_tuple_hook)

         if "pipeline_parameters" not in list(dic.keys()):
-            raise KeyError(
-                f'No "pipeline_parameters" key found in {filename}.'
-            )
+            raise KeyError(f'No "pipeline_parameters" key found in {filename}.')

         for field_name, field_value in dic["pipeline_parameters"].items():
             if field_name not in [
diff --git a/capsul/schemas/brainvisa.py b/capsul/schemas/brainvisa.py
index 37c95260e..dd5399e1d 100644
--- a/capsul/schemas/brainvisa.py
+++ b/capsul/schemas/brainvisa.py
@@ -190,29 +190,21 @@ def declare_morpho_schemas(morpho_module):
     normalization_fsl_reinit = importlib.import_module(
         f"{axon_module}.normalization_fsl_reinit"
     )
-    t1biascorrection = importlib.import_module(
-        f"{axon_module}.t1biascorrection"
-    )
+    t1biascorrection = importlib.import_module(f"{axon_module}.t1biascorrection")
     histoanalysis = importlib.import_module(f"{axon_module}.histoanalysis")
-    brainsegmentation = importlib.import_module(
-        f"{axon_module}.brainsegmentation"
-    )
+    brainsegmentation = importlib.import_module(f"{axon_module}.brainsegmentation")
     skullstripping = importlib.import_module(f"{axon_module}.skullstripping")
     scalpmesh = importlib.import_module(f"{axon_module}.scalpmesh")
     splitbrain = importlib.import_module(f"{axon_module}.splitbrain")
     greywhiteclassificationhemi = importlib.import_module(
         f"{axon_module}.greywhiteclassificationhemi"
     )
-    greywhitetopology = importlib.import_module(
-        f"{axon_module}.greywhitetopology"
-    )
+    greywhitetopology = importlib.import_module(f"{axon_module}.greywhitetopology")
     greywhitemesh = importlib.import_module(f"{axon_module}.greywhitemesh")
     pialmesh = importlib.import_module(f"{axon_module}.pialmesh")
     sulciskeleton = importlib.import_module(f"{axon_module}.sulciskeleton")
     sulcigraph = importlib.import_module(f"{axon_module}.sulcigraph")
-    sulcilabellingann = importlib.import_module(
-        f"{axon_module}.sulcilabellingann"
-    )
+    sulcilabellingann = importlib.import_module(f"{axon_module}.sulcilabellingann")
     sulcilabellingspamglobal = importlib.import_module(
         f"{axon_module}.sulcilabellingspamglobal"
     )