
Commit

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Nov 20, 2023
1 parent 6946bda commit 42e3a83
Showing 8 changed files with 15 additions and 44 deletions.
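
The changes below are formatting-only: calls that were wrapped across several lines are collapsed onto a single line wherever they fit within the formatter's line-length limit, which is the kind of rewrite a Black-style hook run by pre-commit.ci produces. A minimal sketch of that transformation (not part of this commit, and assuming the black package is installed) applied to the first hunk:

import black  # assumption: the 'black' formatter is available; it is only used here for illustration

# A call wrapped over three lines, as in the old version of pipeline.py.
wrapped = (
    "raise ValueError(\n"
    '    f"Cannot link from a pipeline output plug: {link}"\n'
    ")\n"
)

# format_str() applies Black's rules to a source string; with the default
# 88-character line length the wrapped call is joined onto one line.
print(black.format_str(wrapped, mode=black.Mode()))
# -> raise ValueError(f"Cannot link from a pipeline output plug: {link}")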
8 changes: 2 additions & 6 deletions capsul/pipeline/pipeline.py
@@ -958,15 +958,11 @@ def add_link(self, link, weak_link=False, allow_export=False):
         if not source_plug.output and source_node is not self:
             raise ValueError(f"Cannot link from an input plug: {link}")
         if source_plug.output and source_node is self:
-            raise ValueError(
-                f"Cannot link from a pipeline output plug: {link}"
-            )
+            raise ValueError(f"Cannot link from a pipeline output plug: {link}")
         if dest_plug.output and dest_node is not self:
             raise ValueError(f"Cannot link to an output plug: {link}")
         if not dest_plug.output and dest_node is self:
-            raise ValueError(
-                f"Cannot link to a pipeline input plug: {link}"
-            )
+            raise ValueError(f"Cannot link to a pipeline input plug: {link}")

         # Propagate the plug value from source to destination
         value = getattr(source_node, source_plug_name, None)
5 changes: 1 addition & 4 deletions capsul/pipeline/pipeline_nodes.py
@@ -146,10 +146,7 @@ def __init__(
         flat_inputs = []
         for switch_name in inputs:
             flat_inputs.extend(
-                [
-                    f"{switch_name}_switch_{plug_name}"
-                    for plug_name in outputs
-                ]
+                [f"{switch_name}_switch_{plug_name}" for plug_name in outputs]
             )
         node_inputs = [
             dict(name="switch"),
4 changes: 1 addition & 3 deletions capsul/pipeline/python_export.py
@@ -182,9 +182,7 @@ def _write_custom_node(node, pyf, name, enabled):
             file=pyf,
         )
     else:
-        print(
-            f' self.add_custom_node("{name}", "{nodename}")', file=pyf
-        )
+        print(f' self.add_custom_node("{name}", "{nodename}")', file=pyf)
     # optional plugs
     for plug_name, plug in node.plugs.items():
         if plug.optional:
10 changes: 3 additions & 7 deletions capsul/pipeline/topological_sort.py
@@ -141,9 +141,7 @@ def add_node(self, node):
         if not isinstance(node, GraphNode):
             raise Exception(f"Expect a GraphNode, got {node}")
         if node.name in self._nodes:
-            raise Exception(
-                f"Expect a GraphNode with a unique name, got {node}"
-            )
+            raise Exception(f"Expect a GraphNode with a unique name, got {node}")
         self._nodes[node.name] = node

     def find_node(self, node_name):
@@ -170,13 +168,11 @@ def add_link(self, from_node, to_node):
         """
         if from_node not in self._nodes:
             raise Exception(
-                f"Node {from_node} is not defined in the Graph."
-                "Use add_node() method"
+                f"Node {from_node} is not defined in the Graph." "Use add_node() method"
             )
         if to_node not in self._nodes:
             raise Exception(
-                f"Node {to_node} is not defined in the Graph."
-                "Use add_node() method"
+                f"Node {to_node} is not defined in the Graph." "Use add_node() method"
             )
         if (from_node, to_node) not in self._links:
             self._nodes[to_node].add_link_from(self._nodes[from_node])
4 changes: 1 addition & 3 deletions capsul/qt_apps/utils/application.py
@@ -115,9 +115,7 @@ def __init__(self, extra_options=None):

         # If a no valid logging level is found raise an Exception
         if level is None:
-            raise Exception(
-                f"Warning : unknown logging level {self.options.debug}"
-            )
+            raise Exception(f"Warning : unknown logging level {self.options.debug}")

         # Configure the logging module
         logging.basicConfig(level=level, format=logging_format, datefmt=date_format)
8 changes: 2 additions & 6 deletions capsul/qt_gui/widgets/activation_inspector.py
@@ -248,9 +248,7 @@ def refresh_activation_from_record(self):
                 self.activations.append(current_activations.copy())

                 # > Add a line to the activation display
-                self.ui.events.addItem(
-                    f"{iteration}{activation} {node}:{plug}"
-                )
+                self.ui.events.addItem(f"{iteration}{activation} {node}:{plug}")

             # Select the last activation step so the pipeline will be
             # in his final configuration
@@ -270,9 +268,7 @@ def update_pipeline_activation(self, index):
             # Restore the plugs and nodes activations
             node_name = node.full_name
             for plug_name, plug in node.plugs.items():
-                plug.activated = activations.get(
-                    f"{node_name}:{plug_name}", False
-                )
+                plug.activated = activations.get(f"{node_name}:{plug_name}", False)
             node.activated = activations.get(f"{node_name}:", False)

         # Refresh views relying on plugs and nodes selection
4 changes: 1 addition & 3 deletions capsul/qt_gui/widgets/pipeline_developer_view.py
@@ -5246,9 +5246,7 @@ def hinted_tuple_hook(obj):
         dic = json.loads(dic, object_hook=hinted_tuple_hook)

         if "pipeline_parameters" not in list(dic.keys()):
-            raise KeyError(
-                f'No "pipeline_parameters" key found in {filename}.'
-            )
+            raise KeyError(f'No "pipeline_parameters" key found in {filename}.')

         for field_name, field_value in dic["pipeline_parameters"].items():
             if field_name not in [
16 changes: 4 additions & 12 deletions capsul/schemas/brainvisa.py
@@ -190,29 +190,21 @@ def declare_morpho_schemas(morpho_module):
     normalization_fsl_reinit = importlib.import_module(
         f"{axon_module}.normalization_fsl_reinit"
     )
-    t1biascorrection = importlib.import_module(
-        f"{axon_module}.t1biascorrection"
-    )
+    t1biascorrection = importlib.import_module(f"{axon_module}.t1biascorrection")
     histoanalysis = importlib.import_module(f"{axon_module}.histoanalysis")
-    brainsegmentation = importlib.import_module(
-        f"{axon_module}.brainsegmentation"
-    )
+    brainsegmentation = importlib.import_module(f"{axon_module}.brainsegmentation")
     skullstripping = importlib.import_module(f"{axon_module}.skullstripping")
     scalpmesh = importlib.import_module(f"{axon_module}.scalpmesh")
     splitbrain = importlib.import_module(f"{axon_module}.splitbrain")
     greywhiteclassificationhemi = importlib.import_module(
         f"{axon_module}.greywhiteclassificationhemi"
     )
-    greywhitetopology = importlib.import_module(
-        f"{axon_module}.greywhitetopology"
-    )
+    greywhitetopology = importlib.import_module(f"{axon_module}.greywhitetopology")
     greywhitemesh = importlib.import_module(f"{axon_module}.greywhitemesh")
     pialmesh = importlib.import_module(f"{axon_module}.pialmesh")
     sulciskeleton = importlib.import_module(f"{axon_module}.sulciskeleton")
     sulcigraph = importlib.import_module(f"{axon_module}.sulcigraph")
-    sulcilabellingann = importlib.import_module(
-        f"{axon_module}.sulcilabellingann"
-    )
+    sulcilabellingann = importlib.import_module(f"{axon_module}.sulcilabellingann")
     sulcilabellingspamglobal = importlib.import_module(
         f"{axon_module}.sulcilabellingspamglobal"
     )
