diff --git a/pylintrc b/pylintrc index f10aa37..374704c 100644 --- a/pylintrc +++ b/pylintrc @@ -1,24 +1,78 @@ -[MASTER] +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. +#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. -extension-pkg-whitelist= +extension-pkg-allow-list= -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=.git +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore=CVS + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked and +# will not be imported (useful for modules/projects where namespaces are +# manipulated during runtime and thus existing member attributes cannot be +# deduced by static analysis). It supports qualified module names, as well as +# Unix pattern matching. +ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). init-hook='import sys; sys.path.append("spine_engine")' # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. 
+# number of processors available to use, and will cap the count on Windows to +# avoid hangs. jobs=1 # Control the amount of potential inferred values when inferring a single @@ -26,101 +80,40 @@ jobs=1 # complex, nested conditions. limit-inference-results=100 -# List of plugins (as comma separated values of python modules names) to load, +# List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes +# Resolve imports to .pyi stubs if available. May reduce no-member messages and +# increase not-an-iterable messages. +prefer-stubs=no + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.9 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. suggestion-mode=yes # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=yes - +unsafe-load-any-extension=no -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=bad-continuation, - duplicate-code, - fixme, - inconsistent-return-statements, - invalid-name, - line-too-long, - missing-docstring, - no-member, # Workaround for a bug in pylint, see https://github.com/PyCQA/pylint/issues/2585 - protected-access, - too-few-public-methods, - too-many-ancestors, - too-many-arguments, - too-many-branches, - too-many-instance-attributes, - too-many-nested-blocks, - too-many-public-methods, - too-many-lines, - too-many-locals, - too-many-return-statements, - too-many-statements, - unused-argument - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable= - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). 
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= [BASIC] @@ -129,13 +122,15 @@ never-returning-functions=sys.exit argument-naming-style=snake_case # Regular expression matching correct argument names. Overrides argument- -# naming-style. +# naming-style. If left empty, argument names will be checked with the set +# naming style. #argument-rgx= # Naming style matching correct attribute names. attr-naming-style=snake_case # Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming # style. #attr-rgx= @@ -147,24 +142,38 @@ bad-names=foo, tutu, tata +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + # Naming style matching correct class attribute names. class-attribute-naming-style=any # Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. #class-attribute-rgx= +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + # Naming style matching correct class names. class-naming-style=PascalCase # Regular expression matching correct class names. Overrides class-naming- -# style. +# style. If left empty, class names will be checked with the set naming style. #class-rgx= # Naming style matching correct constant names. const-naming-style=UPPER_CASE # Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming # style. #const-rgx= @@ -176,7 +185,8 @@ docstring-min-length=-1 function-naming-style=snake_case # Regular expression matching correct function names. Overrides function- -# naming-style. +# naming-style. If left empty, function names will be checked with the set +# naming style. #function-rgx= # Good variable names which should always be accepted, separated by a comma. @@ -185,11 +195,12 @@ good-names=i, k, ex, Run, - ui, - x, - y, _ +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + # Include a hint for the correct naming format with invalid-name. 
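A short Python sketch of the naming rules configured in the [BASIC] section above; every identifier below is invented for illustration and is not taken from the Spine Engine sources.

MAX_RETRIES = 3                       # const-naming-style=UPPER_CASE: accepted

class KernelSpecCache:                # class-naming-style=PascalCase: accepted
    def resolve(self, kernel_name):   # method and argument snake_case: accepted
        i = 0                         # "i" is listed in good-names, so the one-letter name passes
        return kernel_name, i

foo = "placeholder"                   # "foo" is listed in bad-names: reported as disallowed-name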
include-naming-hint=no @@ -197,21 +208,22 @@ include-naming-hint=no inlinevar-naming-style=any # Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. #inlinevar-rgx= # Naming style matching correct method names. method-naming-style=snake_case # Regular expression matching correct method names. Overrides method-naming- -# style. +# style. If left empty, method names will be checked with the set naming style. #method-rgx= # Naming style matching correct module names. module-naming-style=snake_case # Regular expression matching correct module names. Overrides module-naming- -# style. +# style. If left empty, module names will be checked with the set naming style. #module-rgx= # Colon-delimited sets of names that determine each other's naming style when @@ -227,14 +239,93 @@ no-docstring-rgx=^_ # These decorators are taken in consideration only for invalid-name. property-classes=abc.abstractproperty +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + # Naming style matching correct variable names. variable-naming-style=snake_case # Regular expression matching correct variable names. Overrides variable- -# naming-style. +# naming-style. If left empty, variable names will be checked with the set +# naming style. #variable-rgx= +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + asyncSetUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. 
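The [CLASSES] options above fix the expected first-argument names for class methods and metaclass class methods, and list the methods allowed to define instance attributes. A minimal sketch, with invented class names, of code that satisfies those settings:

class JobRegistry:
    def __init__(self, **attrs):
        # __init__ is in defining-attr-methods, so attributes assigned here
        # do not trigger attribute-defined-outside-init.
        self.attrs = attrs

    @classmethod
    def from_dict(cls, data):          # valid-classmethod-first-arg=cls
        return cls(**data)

class RegistryMeta(type):
    @classmethod
    def describe(mcs):                 # valid-metaclass-classmethod-first-arg=mcs
        return mcs.__name__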
+overgeneral-exceptions=builtins.BaseException,builtins.Exception + + [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. @@ -251,18 +342,11 @@ indent-after-paren=4 indent-string=' ' # Maximum number of characters on a single line. -max-line-length=100 +max-line-length=120 # Maximum number of lines in a module. max-module-lines=1000 -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no @@ -272,10 +356,49 @@ single-line-class-stmt=no single-line-if-stmt=no +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + [LOGGING] -# Format style used to check logging format string. `old` means using % -# formatting, while `new` is for `{}` formatting. +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. logging-format-style=old # Logging modules to check that the string format arguments are in logging @@ -283,6 +406,73 @@ logging-format-style=old logging-modules=logging +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". 
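The explanation above shows how combining --disable=all with --enable runs a single checker. A minimal sketch of the same invocation through pylint's Python entry point, assuming pylint is installed and the script is run from the repository root so that pylintrc and the spine_engine package are both found:

from pylint.lint import Run

# Run only the similarities checker over the package, without exiting the interpreter.
Run(["--disable=all", "--enable=similarities", "--rcfile=pylintrc", "spine_engine"], exit=False)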
+disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero, + missing-module-docstring, + missing-class-docstring, + missing-function-docstring, + invalid-name, + no-member, + fixme, + protected-access, + too-few-public-methods, + too-many-ancestors, + too-many-arguments, + too-many-branches, + too-many-instance-attributes, + too-many-nested-blocks, + too-many-public-methods, + too-many-lines, + too-many-locals, + too-many-return-statements, + too-many-statements, + unused-argument, + inconsistent-return-statements, + wrong-import-order, + ungrouped-imports + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. @@ -290,20 +480,69 @@ notes=FIXME, XXX, TODO +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + +# Let 'consider-using-join' be raised when the separator to join on would be +# non-empty (resulting in expected fixes of the type: ``"- " + " - +# ".join(items)``) +suggest-join-with-non-empty-separator=yes + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + [SIMILARITIES] -# Ignore comments when computing similarities. +# Comments are removed from the similarity computation ignore-comments=yes -# Ignore docstrings when computing similarities. 
+# Docstrings are removed from the similarity computation ignore-docstrings=yes -# Ignore imports when computing similarities. -ignore-imports=no +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes # Minimum lines number of a similarity. -min-similarity-lines=6 +min-similarity-lines=4 [SPELLING] @@ -311,26 +550,33 @@ min-similarity-lines=6 # Limits count of emitted suggestions for spelling mistakes. max-spelling-suggestions=4 -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package.. +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. spelling-dict= +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + # List of comma separated words that should not be checked. spelling-ignore-words= -# A path to a file that contains private dictionary; one word per line. +# A path to a file that contains the private dictionary; one word per line. spelling-private-dict-file= -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. spelling-store-unknown-words=no [STRING] -# This flag controls whether the implicit-str-concat-in-sequence should -# generate a warning on implicit string concatenation in sequences defined over -# several lines. +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. check-str-concat-over-line-jumps=no @@ -346,10 +592,6 @@ contextmanager-decorators=contextlib.contextmanager # expressions are accepted. generated-members= -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. ignore-none=yes @@ -362,16 +604,16 @@ ignore-none=yes # the rest of the inferred objects. ignore-on-opaque-inference=yes +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. 
-ignored-modules= +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. @@ -385,6 +627,12 @@ missing-member-hint-distance=1 # showing a hint for a missing member. missing-member-max-choices=1 +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + [VARIABLES] @@ -393,19 +641,12 @@ missing-member-max-choices=1 additional-builtins= # Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb # A regular expression matching the name of dummy variables (i.e. expected to # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. +# Argument names that match this expression will be ignored. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. @@ -413,100 +654,4 @@ init-import=no # List of qualified module names which can have objects that can redefine # builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement. -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. 
internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io \ No newline at end of file diff --git a/spine_engine/execution_managers/conda_kernel_spec_manager.py b/spine_engine/execution_managers/conda_kernel_spec_manager.py index 0318adc..f527540 100644 --- a/spine_engine/execution_managers/conda_kernel_spec_manager.py +++ b/spine_engine/execution_managers/conda_kernel_spec_manager.py @@ -80,7 +80,7 @@ def _validate_kernelspec_path(self, proposal): def __init__(self, **kwargs): self._conda_executable = kwargs.pop("conda_exe") - super(CondaKernelSpecManager, self).__init__(**kwargs) + super().__init__(**kwargs) self.log = logging.getLogger(__name__) self.log.setLevel(logging.WARNING) self._conda_info_cache = None @@ -140,7 +140,7 @@ def _conda_info(self): ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") result = ansi_escape.sub("", p) # Remove ANSI Escape Sequences, such as ESC[0m conda_info = json.loads(result) - except Exception as err: + except Exception: conda_info = None self.log.error("Obtaining 'conda info --json' failed") self._conda_info_cache = conda_info @@ -183,12 +183,12 @@ def _all_envs(self): # as created by, say, conda or anaconda-project. The name # of the parent directory, then, provides useful context. if basename(env_base) == "envs" and (env_base != envs_prefix or env_name in all_envs): - env_name = "{}-{}".format(basename(dirname(env_base)), env_name) + env_name = f"{basename(dirname(env_base))}-{env_name}" # Further disambiguate, if necessary, with a counter. if env_name in all_envs: base_name = env_name for count in range(len(all_envs)): - env_name = "{}-{}".format(base_name, count + 2) + env_name = f"{base_name}-{count + 2}" if env_name not in all_envs: break all_envs[env_name] = env_path @@ -235,7 +235,7 @@ def _all_specs(self): elif kernel_name == "ir": kernel_name = "r" kernel_prefix = "" if env_name == "root" else "env-" - kernel_name = "conda-{}{}-{}".format(kernel_prefix, env_name, kernel_name) + kernel_name = f"conda-{kernel_prefix}{env_name}-{kernel_name}" # Replace invalid characters with dashes kernel_name = self.clean_kernel_name(kernel_name) @@ -326,7 +326,7 @@ def find_kernel_specs(self): if self.conda_only: kspecs = {} else: - kspecs = super(CondaKernelSpecManager, self).find_kernel_specs() + kspecs = super().find_kernel_specs() # add conda envs kernelspecs if self.whitelist: @@ -350,7 +350,7 @@ def get_kernel_spec(self, kernel_name): return None self.log.info(f"res.argv:{res.argv}") if res is None and not self.conda_only: - res = super(CondaKernelSpecManager, self).get_kernel_spec(kernel_name) + res = super().get_kernel_spec(kernel_name) return res def get_all_specs(self): @@ -377,7 +377,7 @@ def remove_kernel_spec(self, name): self.ensure_native_kernel = False # Conda environment kernelspec are only virtual, so remove can only be applied # on non-virtual kernels. 
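Several hunks in conda_kernel_spec_manager.py replace the explicit super(CondaKernelSpecManager, self) calls with the zero-argument form. A standalone sketch of the equivalence, using invented class names rather than the real kernel spec manager classes:

class BaseManager:
    def find_kernel_specs(self):
        return {"python3": "/path/to/spec"}

class CondaManager(BaseManager):
    def find_kernel_specs(self):
        # On Python 3 this is identical to super(CondaManager, self).find_kernel_specs().
        return super().find_kernel_specs()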
- specs = super(CondaKernelSpecManager, self).find_kernel_specs() + specs = super().find_kernel_specs() finally: self.ensure_native_kernel = save_native spec_dir = specs[name] diff --git a/spine_engine/execution_managers/conda_kernel_spec_runner.py b/spine_engine/execution_managers/conda_kernel_spec_runner.py index 6aa3099..7109a65 100644 --- a/spine_engine/execution_managers/conda_kernel_spec_runner.py +++ b/spine_engine/execution_managers/conda_kernel_spec_runner.py @@ -47,9 +47,7 @@ def exec_in_env(conda_prefix, env_path, *command): os.execvp(quoted_command[0], quoted_command) else: activate = os.path.join(conda_prefix, "bin", "activate") - ecomm = ". '{}' '{}' && echo CONDA_PREFIX=$CONDA_PREFIX && exec {}".format( - activate, env_path, " ".join(quoted_command) - ) + ecomm = f". '{activate}' '{env_path}' && echo CONDA_PREFIX=$CONDA_PREFIX && exec {' '.join(quoted_command)}" ecomm = ["sh" if "bsd" in sys.platform else "bash", "-c", ecomm] os.execvp(ecomm[0], ecomm) diff --git a/spine_engine/execution_managers/kernel_execution_manager.py b/spine_engine/execution_managers/kernel_execution_manager.py index eae9d7b..c8523e4 100644 --- a/spine_engine/execution_managers/kernel_execution_manager.py +++ b/spine_engine/execution_managers/kernel_execution_manager.py @@ -69,9 +69,8 @@ def _make_kernel_manager(self, kernel_name, group_id, server_ip, filter_id): """ if not filter_id == "": group_id = filter_id # Ignore group ID in case filter ID exists - for k in self._kernel_managers: + for km in self._kernel_managers.values(): # Reuse kernel manager if using same group id and kernel and it's idle - km = self._kernel_managers[k] if km.group_id() == group_id and km.kernel_name == kernel_name: if not km.is_busy(): return km @@ -101,11 +100,11 @@ def new_kernel_manager(self, kernel_name, group_id, logger, extra_switches=None, conda_exe = kwargs.pop("conda_exe", "") if environment == "conda": if not os.path.exists(conda_exe): - logger.msg_kernel_execution.emit(msg=dict(type="conda_not_found")) + logger.msg_kernel_execution.emit({"type": "conda_not_found"}) self._kernel_managers.pop(self.get_kernel_manager_key(km)) return None km.kernel_spec_manager = CondaKernelSpecManager(conda_exe=conda_exe) - msg = dict(kernel_name=kernel_name) + msg = {"kernel_name": kernel_name} if not km.is_alive(): try: if not km.kernel_spec: @@ -218,7 +217,7 @@ def kill_kernel_managers(self): """Shuts down all kernel managers stored in the factory.""" while True: try: - key, km = self._kernel_managers.popitem() + _, km = self._kernel_managers.popitem() if km.is_alive(): km.shutdown_kernel(now=True) except KeyError: @@ -283,7 +282,7 @@ def __init__( **kwargs (optional): Keyword arguments passed to ``KernelManager.start_kernel()`` """ super().__init__(logger) - self._msg_head = dict(kernel_name=kernel_name) + self._msg_head = {"kernel_name": kernel_name} self._commands = commands self._cmd_failed = False self.std_out = kwargs["stdout"] = open(os.devnull, "w") @@ -306,7 +305,7 @@ def run_until_complete(self): if self._kill_completed: conn_file = self._kernel_manager.connection_file shutdown_kernel_manager(conn_file) - self._logger.msg_kernel_execution.emit(dict(type="kernel_shutdown", **self._msg_head)) + self._logger.msg_kernel_execution.emit({"type": "kernel_shutdown", **self._msg_head}) if self._cmd_failed or not run_succeeded: return -1 return 0 @@ -315,12 +314,12 @@ def _do_run(self): try: self._kernel_client.wait_for_ready(timeout=self._startup_timeout) except RuntimeError as e: - msg = dict(type="execution_failed_to_start", 
error=str(e), **self._msg_head) + msg = {"type": "execution_failed_to_start", "error": str(e), **self._msg_head} self._logger.msg_kernel_execution.emit(msg) self._kernel_client.stop_channels() self._kernel_manager.shutdown_kernel(now=True) return False - msg = dict(type="execution_started", **self._msg_head) + msg = {"type": "execution_started", **self._msg_head} self._logger.msg_kernel_execution.emit(msg) for cmd in self._commands: self._cmd_failed = False @@ -341,7 +340,7 @@ def _output_hook(self, msg): elif msg["header"]["msg_type"] == "status": # Set kernel manager busy if execution is starting or in progress exec_state = msg["content"]["execution_state"] - if exec_state == "busy" or exec_state == "starting": + if exec_state in {"busy", "starting"}: self._kernel_manager.set_busy(True) else: # exec_state == 'idle' self._kernel_manager.set_busy(False) @@ -357,4 +356,4 @@ def stop_execution(self): if self._kill_completed: conn_file = self._kernel_manager.connection_file shutdown_kernel_manager(conn_file) - self._logger.msg_kernel_execution.emit(dict(type="kernel_shutdown", **self._msg_head)) + self._logger.msg_kernel_execution.emit({"type": "kernel_shutdown", **self._msg_head}) diff --git a/spine_engine/execution_managers/persistent_execution_manager.py b/spine_engine/execution_managers/persistent_execution_manager.py index 0e0a573..bb9d0ee 100644 --- a/spine_engine/execution_managers/persistent_execution_manager.py +++ b/spine_engine/execution_managers/persistent_execution_manager.py @@ -54,7 +54,7 @@ def __init__(self, args, group_id): self._is_running_lock = Lock() self._is_running = True self._persistent_resources_release_lock = Lock() - self._kwargs = dict(stdin=PIPE, stdout=PIPE, stderr=PIPE) + self._kwargs = {"stdin": PIPE, "stdout": PIPE, "stderr": PIPE} if sys.platform == "win32": self._kwargs["creationflags"] = CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW # Setup Popen to not show console in frozen app. 
Another option is to use @@ -149,7 +149,7 @@ def _log_stdout(self): try: for line in iter(self._persistent.stdout.readline, b""): data = line.decode("UTF8", "replace").rstrip() - self._msg_queue.put(dict(type="stdout", data=data)) + self._msg_queue.put({"type": "stdout", "data": data}) except ValueError: pass @@ -158,7 +158,7 @@ def _log_stderr(self): try: for line in iter(self._persistent.stderr.readline, b""): data = line.decode("UTF8", "replace").rstrip() - self._msg_queue.put(dict(type="stderr", data=data)) + self._msg_queue.put({"type": "stderr", "data": data}) except ValueError: pass @@ -464,6 +464,7 @@ def _release_persistent_resources(self): def _send_ctrl_c(pid): + # pylint: disable=possibly-used-before-assignment kernel = ctypes.windll.kernel32 kernel.FreeConsole() kernel.AttachConsole(pid) @@ -549,7 +550,7 @@ def __bool__(self): @staticmethod def _emit_persistent_started(logger, key, language): - msg = dict(type="persistent_started", key=key, language=language) + msg = {"type": "persistent_started", "key": key, "language": language} logger.msg_persistent_execution.emit(msg) def _get_idle_persistent_managers(self): @@ -602,7 +603,7 @@ def new_persistent_manager(self, constructor, logger, args, group_id): try: pm = self.persistent_managers[key] = constructor(args, group_id) except OSError as err: - msg = dict(type="persistent_failed_to_start", args=" ".join(args), error=str(err)) + msg = {"type": "persistent_failed_to_start", "args": " ".join(args), "error": str(err)} logger.msg_persistent_execution.emit(msg) return None self._emit_persistent_started(logger, key, pm.language) @@ -658,8 +659,7 @@ def issue_persistent_command(self, key, cmd): pm = self.persistent_managers.get(key) if pm is None: return () - for msg in pm.issue_command(cmd, add_history=True, catch_exception=False): - yield msg + yield from pm.issue_command(cmd, add_history=True, catch_exception=False) def is_persistent_command_complete(self, key, cmd): """Checks whether a command is complete. 
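The hunk above replaces an explicit re-yield loop with yield from. A self-contained sketch of the pattern, with invented generator names:

def issue_commands(commands):
    for cmd in commands:
        yield f"ran {cmd}"

def issue_all(commands):
    # Equivalent to: for msg in issue_commands(commands): yield msg
    yield from issue_commands(commands)

print(list(issue_all(["import sys", "print(sys.path)"])))  # ['ran import sys', 'ran print(sys.path)']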
@@ -688,7 +688,7 @@ def get_persistent_completions(self, key, text): """ pm = self.persistent_managers.get(key) if pm is None: - return + return [] return pm.get_completions(text) def get_persistent_history_item(self, key, text, prefix, backwards): @@ -702,7 +702,7 @@ def get_persistent_history_item(self, key, text, prefix, backwards): """ pm = self.persistent_managers.get(key) if pm is None: - return + return "" return pm.get_history_item(text, prefix, backwards) def kill_manager_processes(self): @@ -824,10 +824,10 @@ def run_until_complete(self): return -1 self._persistent_manager.set_running_until_completion(True) try: - msg = dict(type="execution_started", args=" ".join(self._args)) + msg = {"type": "execution_started", "args": " ".join(self._args)} self._logger.msg_persistent_execution.emit(msg) fmt_alias = "# Running " + self._alias.rstrip() - self._logger.msg_persistent_execution.emit(dict(type="stdin", data=fmt_alias)) + self._logger.msg_persistent_execution.emit({"type": "stdin", "data": fmt_alias}) for cmd in self._commands: for msg in self._persistent_manager.issue_command(cmd): if msg["type"] != "stdin": diff --git a/spine_engine/execution_managers/process_execution_manager.py b/spine_engine/execution_managers/process_execution_manager.py index fcc013c..4bd40ae 100644 --- a/spine_engine/execution_managers/process_execution_manager.py +++ b/spine_engine/execution_managers/process_execution_manager.py @@ -54,10 +54,10 @@ def run_until_complete(self): creationflags=cf, ) except OSError as e: - msg = dict(type="execution_failed_to_start", error=str(e), program=self._program) + msg = {"type": "execution_failed_to_start", "error": str(e), "program": self._program} self._logger.msg_standard_execution.emit(msg) return 1 - msg = dict(type="execution_started", program=self._program, args=" ".join(self._args)) + msg = {"type": "execution_started", "program": self._program, "args": " ".join(self._args)} self._logger.msg_standard_execution.emit(msg) running = "# Running" + " ".join([self._program] + self._args) self._logger.msg_standard_execution.emit({"type": "stdin", "data": running}) diff --git a/spine_engine/jumpster.py b/spine_engine/jumpster.py index ce55e09..8c7461f 100644 --- a/spine_engine/jumpster.py +++ b/spine_engine/jumpster.py @@ -153,7 +153,7 @@ def execute(self): while not self._is_complete() or active_iters: # start iterators while len(active_iters) < self._max_concurrent: - candidate_steps = self._get_steps_to_execute(limit=(self._max_concurrent - len(active_iters))) + candidate_steps = self._get_steps_to_execute(limit=self._max_concurrent - len(active_iters)) step_by_key.update({step.key: step for step in candidate_steps}) # Add all waiting steps candidate_steps += list(waiting.values()) @@ -257,12 +257,9 @@ def execute(self): del active_iters[key] errs = {tid: err for tid, err in errors.items() if err} if errs: + error_list = "\n".join([f"In thread {tid}: {err.to_string()}" for tid, err in errs.items()]) raise JumpsterThreadError( - "During multithread execution errors occurred in threads:\n{error_list}".format( - error_list="\n".join( - ["In thread {tid}: {err}".format(tid=tid, err=err.to_string()) for tid, err in errs.items()] - ) - ), + f"During multithread execution errors occurred in threads:\n{error_list}", thread_error_infos=list(errs.values()), ) diff --git a/spine_engine/project_item/connection.py b/spine_engine/project_item/connection.py index b5a7c2f..763121f 100644 --- a/spine_engine/project_item/connection.py +++ b/spine_engine/project_item/connection.py @@ 
-157,7 +157,7 @@ def make_logger(self, queue): self._logger = QueueLogger(queue, self.name, None, {}) def emit_flash(self): - self._logger.flash.emit() + self._logger.flash.emit("") DEFAULT_ENABLED_FILTER_TYPES = {ALTERNATIVE_FILTER_TYPE: False, SCENARIO_FILTER_TYPE: True} @@ -590,7 +590,7 @@ class Jump(ConnectionBase): """Represents a conditional jump between two project items.""" def __init__( - self, source_name, source_position, destination_name, destination_position, condition={}, cmd_line_args=() + self, source_name, source_position, destination_name, destination_position, condition=None, cmd_line_args=() ): """ Args: @@ -664,7 +664,7 @@ def _is_python_script_condition_true(self, jump_counter): script_file.seek(0) python = resolve_current_python_interpreter() result = subprocess.run( - [python, "-", *expanded_args], encoding="utf-8", stdin=script_file, capture_output=True + [python, "-", *expanded_args], encoding="utf-8", stdin=script_file, capture_output=True, check=False ) if result.stdout: self._logger.msg_proc.emit(result.stdout) diff --git a/spine_engine/project_item/executable_item_base.py b/spine_engine/project_item/executable_item_base.py index ca39fd3..1ca5a68 100644 --- a/spine_engine/project_item/executable_item_base.py +++ b/spine_engine/project_item/executable_item_base.py @@ -179,7 +179,6 @@ def stop_execution(self): """Stops executing this item.""" self._logger.msg.emit(f"Stopping {self._name}") - # pylint: disable=no-self-use def _output_resources_forward(self): """Returns output resources for forward execution. @@ -190,7 +189,6 @@ def _output_resources_forward(self): """ return [] - # pylint: disable=no-self-use def _output_resources_backward(self): """Returns output resources for backward execution. diff --git a/spine_engine/project_item/project_item_resource.py b/spine_engine/project_item/project_item_resource.py index 3bb7192..99c5912 100644 --- a/spine_engine/project_item/project_item_resource.py +++ b/spine_engine/project_item/project_item_resource.py @@ -360,7 +360,7 @@ def get_source(resource): """ if resource.type_ in _DATABASE_RESOURCE_TYPES: return resource.url - elif resource.hasfilepath: + if resource.hasfilepath: return resource.path return None diff --git a/spine_engine/server/certificate_creator.py b/spine_engine/server/certificate_creator.py index 1a5ecbb..a1af038 100644 --- a/spine_engine/server/certificate_creator.py +++ b/spine_engine/server/certificate_creator.py @@ -42,8 +42,8 @@ def generate_certificates(base_dir): os.mkdir(d) # create new keys in certificates dir - server_public_file, server_secret_file = zmq.auth.create_certificates(keys_dir, "server") - client_public_file, client_secret_file = zmq.auth.create_certificates(keys_dir, "client") + zmq.auth.create_certificates(keys_dir, "server") + zmq.auth.create_certificates(keys_dir, "client") # move public keys to appropriate directory for key_file in os.listdir(keys_dir): if key_file.endswith(".key"): @@ -58,7 +58,6 @@ def main(args): if len(args) < 2: script_dir = os.path.dirname(os.path.abspath(__file__)) base_dir = os.path.join(script_dir, "certs") - cert_dir = os.path.join(base_dir, "certificates") if os.path.exists(base_dir): print(f"Directory {base_dir} already exists. 
Please remove it to recreate certificates.") return 0 diff --git a/spine_engine/server/engine_server.py b/spine_engine/server/engine_server.py index 6fbec6c..6e7364e 100644 --- a/spine_engine/server/engine_server.py +++ b/spine_engine/server/engine_server.py @@ -101,17 +101,14 @@ def serve(self): ctrl_msg_listener = self._context.socket(zmq.PAIR) ctrl_msg_listener.connect("inproc://ctrl_msg") if self._sec_model_state == ServerSecurityModel.STONEHOUSE: - try: - self.auth = self.enable_stonehouse_security(frontend) - except ValueError: - raise + self.auth = self.enable_stonehouse_security(frontend) frontend.bind(self.protocol + "://*:" + str(self.port)) poller = zmq.Poller() poller.register(frontend, zmq.POLLIN) poller.register(backend, zmq.POLLIN) poller.register(ctrl_msg_listener, zmq.POLLIN) except Exception as e: - raise ValueError(f"Initializing serve() failed due to exception: {e}") + raise ValueError(f"Initializing serve() failed due to exception: {e}") from e workers = {} project_dirs = {} # Mapping of job Id to an abs. path to a project directory ready for execution persistent_exec_mngr_q = queue.Queue() @@ -242,7 +239,7 @@ def kill_persistent_exec_mngrs(self): n_exec_mngrs = len(self.persistent_exec_mngrs) if n_exec_mngrs > 0: print(f"Closing {len(self.persistent_exec_mngrs)} persistent execution manager processes") - for k, exec_mngr in self.persistent_exec_mngrs.items(): + for exec_mngr in self.persistent_exec_mngrs.values(): exec_mngr._persistent_manager.kill_process() self.persistent_exec_mngrs.clear() @@ -333,8 +330,8 @@ def enable_stonehouse_security(self, frontend): ipaddress.ip_address(ep) auth.allow(ep) allowed.append(ep) # Just for printing - except: - raise ValueError(f"Invalid IP address in allowEndpoints.txt:'{ep}'") + except Exception as exc: + raise ValueError(f"Invalid IP address in allowEndpoints.txt:'{ep}'") from exc allowed_str = "\n".join(allowed) print(f"StoneHouse security activated. 
Allowed endpoints ({len(allowed)}):\n{allowed_str}") # Tell the authenticator how to handle CURVE requests diff --git a/spine_engine/server/persistent_execution_service.py b/spine_engine/server/persistent_execution_service.py index 2c67ba5..f9f15e3 100644 --- a/spine_engine/server/persistent_execution_service.py +++ b/spine_engine/server/persistent_execution_service.py @@ -31,7 +31,7 @@ def __init__(self, context, request, job_id, persistent_exec_mngr): job_id (str): Worker thread Id persistent_exec_mngr (PersistentExecutionManagerBase): Persistent execution manager """ - super(PersistentExecutionService, self).__init__(name="PersistentExecutionService") + super().__init__(name="PersistentExecutionService") ServiceBase.__init__(self, context, request, job_id) self.persistent_exec_mngr = persistent_exec_mngr self.push_socket = self.context.socket(zmq.PUSH) diff --git a/spine_engine/server/ping_service.py b/spine_engine/server/ping_service.py index f6599ec..0cf3f40 100644 --- a/spine_engine/server/ping_service.py +++ b/spine_engine/server/ping_service.py @@ -32,7 +32,7 @@ def __init__(self, context, request, job_id): request (Request): Client request job_id (str): Worker thread Id """ - super(PingService, self).__init__(name="PingServiceThread") + super().__init__(name="PingServiceThread") ServiceBase.__init__(self, context, request, job_id) def run(self): diff --git a/spine_engine/server/project_extractor_service.py b/spine_engine/server/project_extractor_service.py index a8757f8..f3782d8 100644 --- a/spine_engine/server/project_extractor_service.py +++ b/spine_engine/server/project_extractor_service.py @@ -39,7 +39,7 @@ def __init__(self, context, request, job_id): request (Request): Client request job_id (str): Worker thread Id """ - super(ProjectExtractorService, self).__init__(name="ProjectExtractorServiceThread") + super().__init__(name="ProjectExtractorServiceThread") ServiceBase.__init__(self, context, request, job_id) def run(self): diff --git a/spine_engine/server/project_remover_service.py b/spine_engine/server/project_remover_service.py index 4645878..b2d2385 100644 --- a/spine_engine/server/project_remover_service.py +++ b/spine_engine/server/project_remover_service.py @@ -33,7 +33,7 @@ def __init__(self, context, request, job_id, project_dir): job_id (str): Thread job Id project_dir (str): Absolute path to project directory """ - super(ProjectRemoverService, self).__init__(name="ProjectRemoverServiceThread") + super().__init__(name="ProjectRemoverServiceThread") ServiceBase.__init__(self, context, request, job_id) self.project_dir = project_dir diff --git a/spine_engine/server/project_retriever_service.py b/spine_engine/server/project_retriever_service.py index 808f658..f7ce3ac 100644 --- a/spine_engine/server/project_retriever_service.py +++ b/spine_engine/server/project_retriever_service.py @@ -36,7 +36,7 @@ def __init__(self, context, request, job_id, project_dir): job_id (str): Thread job Id project_dir (str): Absolute path to project directory """ - super(ProjectRetrieverService, self).__init__(name="ProjectRetrieverServiceThread") + super().__init__(name="ProjectRetrieverServiceThread") ServiceBase.__init__(self, context, request, job_id) self.project_dir = project_dir diff --git a/spine_engine/server/remote_execution_service.py b/spine_engine/server/remote_execution_service.py index 4519fab..1e64485 100644 --- a/spine_engine/server/remote_execution_service.py +++ b/spine_engine/server/remote_execution_service.py @@ -100,7 +100,7 @@ def run(self): 
self.collect_running_items(self.engine._running_items) json_event = EventDataConverter.convert(event_type, data) self.push_socket.send_multipart([json_event.encode("utf-8")]) # Blocks until the client pulls - if data == "COMPLETED" or data == "FAILED" or data == "USER_STOPPED": + if data in {"COMPLETED", "FAILED", "USER_STOPPED"}: break except StopIteration: # Raised by SpineEngine._get_event_stream() generator if we try to get_event() after diff --git a/spine_engine/server/start_server.py b/spine_engine/server/start_server.py index 4f83f0b..c9f4d74 100644 --- a/spine_engine/server/start_server.py +++ b/spine_engine/server/start_server.py @@ -54,8 +54,7 @@ def main(argv): except Exception as e: print(f"start_server.main(): {type(e).__name__}: {e}") break - else: - time.sleep(0.1) + time.sleep(0.1) return diff --git a/spine_engine/server/util/event_data_converter.py b/spine_engine/server/util/event_data_converter.py index f9c8f28..b695268 100644 --- a/spine_engine/server/util/event_data_converter.py +++ b/spine_engine/server/util/event_data_converter.py @@ -75,7 +75,7 @@ def break_event_data(event_type, data): Returns: dict or str: Edited data dictionary or data string as it was. """ - if type(data) != str: + if not isinstance(data, str): if "item_state" in data.keys(): data["item_state"] = str(data["item_state"]) # Cast ItemExecutionFinishState instance to string if "url" in data.keys(): @@ -98,7 +98,7 @@ def break_event_data(event_type, data): # Print warning if there are any tuples used as keys in the data dictionary. # Tuples are converted to lists by json.dumps(). Lists must be converted back to tuples # on client side (in fix_event_data()). - if type(data[key]) == tuple: + if isinstance(data[key], tuple): print(f"[WARNING] Found tuple in message {event_type}: {data}. Fix this on client side.") return data @@ -114,7 +114,7 @@ def fix_event_data(event): """ # Convert item_state str back to ItemExecutionFinishState. This was converted to str on server because # it is not JSON serializable - if type(event[1]) == str: + if isinstance(event[1], str): return event if "item_state" in event[1].keys(): event[1]["item_state"] = convert_execution_finish_state(event[1]["item_state"]) diff --git a/spine_engine/server/util/zip_handler.py b/spine_engine/server/util/zip_handler.py index 643cf67..c51d515 100644 --- a/spine_engine/server/util/zip_handler.py +++ b/spine_engine/server/util/zip_handler.py @@ -38,10 +38,7 @@ def package(src_folder, dst_folder, fname): fname (str): Name of the ZIP-file without extension (it's added by shutil.make_archive()) """ zip_path = os.path.join(dst_folder, fname) - try: - shutil.make_archive(zip_path, "zip", src_folder) - except OSError: - raise + shutil.make_archive(zip_path, "zip", src_folder) @staticmethod def extract(zip_file, output_folder): @@ -77,7 +74,4 @@ def delete_folder(folder): raise ValueError("Invalid input. 
No folder given.") if not os.path.isdir(folder): raise ValueError(f"Given dir:{folder} does not exist.") - try: - shutil.rmtree(folder) - except OSError: - raise + shutil.rmtree(folder) diff --git a/spine_engine/utils/helpers.py b/spine_engine/utils/helpers.py index ee23ab1..5d6a71a 100644 --- a/spine_engine/utils/helpers.py +++ b/spine_engine/utils/helpers.py @@ -130,7 +130,7 @@ def resolve_current_python_interpreter(): """ if not is_frozen(): return sys.executable - if not sys.platform == "win32": + if sys.platform != "win32": path = resolve_executable_from_path(PYTHON_EXECUTABLE) if path != "": return path @@ -537,10 +537,9 @@ def get_file_size(size_in_bytes): return str(size_in_bytes) + " B" if kb < size_in_bytes <= mb: return str(round(size_in_bytes / kb, 1)) + " KB" - elif mb < size_in_bytes < gb: + if mb < size_in_bytes < gb: return str(round(size_in_bytes / mb, 1)) + " MB" - else: - return str(round(size_in_bytes / gb, 1)) + " GB" + return str(round(size_in_bytes / gb, 1)) + " GB" class PartCount: diff --git a/spine_engine/utils/queue_logger.py b/spine_engine/utils/queue_logger.py index 97ddc0c..6baea9a 100644 --- a/spine_engine/utils/queue_logger.py +++ b/spine_engine/utils/queue_logger.py @@ -34,8 +34,8 @@ def filter_id(self, filter_id): self._filter_id = filter_id def emit(self, msg): - msg = dict(filter_id=self._filter_id, **msg) - full_msg = (self._event_type, dict(item_name=self._item_name, **msg)) + msg = {"filter_id": self._filter_id, **msg} + full_msg = (self._event_type, {"item_name": self._item_name, **msg}) self._queue.put(full_msg) for slot in self._slots: slot(msg) @@ -52,8 +52,8 @@ def __init__(self, queue, item_name, event_type, msg_type): super().__init__(queue, item_name, event_type) self._msg_type = msg_type - def emit(self, msg_text): - super().emit({"msg_type": self._msg_type, "msg_text": msg_text}) + def emit(self, msg): + super().emit({"msg_type": self._msg_type, "msg_text": msg}) class _ExecutionMessage(_MessageBase): @@ -79,11 +79,11 @@ def __init__(self, queue, item_name, prompt_queue, answered_prompts): self._prompt_queue = prompt_queue self._answered_prompts = answered_prompts - def emit(self, prompt_data): - key = str(prompt_data) + def emit(self, msg): + key = str(msg) if key not in self._answered_prompts: self._answered_prompts[key] = self._PENDING - prompt = {"prompter_id": id(self._prompt_queue), "data": prompt_data} + prompt = {"prompter_id": id(self._prompt_queue), "data": msg} self._queue.put(("prompt", prompt)) self._answered_prompts[key] = self._prompt_queue.get() while self._answered_prompts[key] is self._PENDING: @@ -95,7 +95,7 @@ class _Flash(_MessageBase): def __init__(self, queue, item_name): super().__init__(queue, item_name, "flash") - def emit(self): + def emit(self, msg): self._queue.put(("flash", {"item_name": self._item_name})) diff --git a/spine_engine/utils/serialization.py b/spine_engine/utils/serialization.py index e06d93b..6a48c3c 100644 --- a/spine_engine/utils/serialization.py +++ b/spine_engine/utils/serialization.py @@ -125,7 +125,7 @@ def deserialize_path(serialized, project_dir): if path_type == "url": return serialized["path"] except KeyError as error: - raise RuntimeError(f"Key '{error}' missing from serialized path") + raise RuntimeError(f"Key '{error}' missing from serialized path") from error raise RuntimeError(f"Cannot deserialize: unknown path type '{path_type}'") @@ -142,4 +142,4 @@ def deserialize_remote_path(serialized, base_path): path = serialized["path"] return urljoin(base_path, path) except KeyError as 
error: - raise RuntimeError(f"Key '{error}' missing from serialized url") + raise RuntimeError(f"Key '{error}' missing from serialized url") from error
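The serialization.py hunks above add explicit exception chaining. A self-contained sketch, with a simplified function body, of what the "from error" clause changes: the original KeyError is attached as __cause__, so tracebacks present it as the direct cause rather than as an unrelated error raised during handling.

def deserialize_path(serialized):
    try:
        return serialized["path"]
    except KeyError as error:
        # Chaining keeps the KeyError visible as the direct cause in the traceback.
        raise RuntimeError(f"Key '{error}' missing from serialized path") from error

try:
    deserialize_path({})
except RuntimeError as exc:
    print(type(exc.__cause__).__name__)  # prints: KeyError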