diff --git a/pyproject.toml b/pyproject.toml
index b1fe7b9f..4ae0e58a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -66,5 +66,4 @@ ignore_errors = true
[tool.black]
line-length = 120
-skip-string-normalization = true
exclude = '\.git'
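
The substantive change above is the removal of skip-string-normalization: with
normalization back on, black rewrites single-quoted strings as double-quoted
ones and drops redundant u-prefixes, which is what drives every hunk below. A
minimal sketch of that behaviour, assuming a reasonably recent black (Mode and
format_str are black's public API; the sample strings are invented):

    import black

    mode = black.Mode(line_length=120, string_normalization=True)
    print(black.format_str("x = 'hello'", mode=mode), end="")  # x = "hello"
    print(black.format_str("s = u'a'", mode=mode), end="")     # s = "a"
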
diff --git a/spine_engine/execution_managers/conda_kernel_spec_manager.py b/spine_engine/execution_managers/conda_kernel_spec_manager.py
index 44b85d43..db37351b 100644
--- a/spine_engine/execution_managers/conda_kernel_spec_manager.py
+++ b/spine_engine/execution_managers/conda_kernel_spec_manager.py
@@ -66,7 +66,7 @@ def _validate_kernelspec_path(self, proposal):
return new_value
name_format = Unicode(
- '{language} [conda env:{environment}]',
+ "{language} [conda env:{environment}]",
config=True,
help="""String name format; available field names within the string:
'{0}' = Language
@@ -106,15 +106,15 @@ def clean_kernel_name(kname):
a bit of effort to preserve readability.
"""
try:
- kname.encode('ascii')
+ kname.encode("ascii")
except UnicodeEncodeError:
# Replace accented characters with unaccented equivalents
import unicodedata
- nfkd_form = unicodedata.normalize('NFKD', kname)
- kname = u"".join([c for c in nfkd_form if not unicodedata.combining(c)])
+ nfkd_form = unicodedata.normalize("NFKD", kname)
+ kname = "".join([c for c in nfkd_form if not unicodedata.combining(c)])
# Replace anything else, including spaces, with underscores
- kname = re.sub(r'[^a-zA-Z0-9._\-]', '_', kname)
+ kname = re.sub(r"[^a-zA-Z0-9._\-]", "_", kname)
return kname
@property
@@ -131,15 +131,15 @@ def _conda_info(self):
# This is to make sure that subprocess can find 'conda' even if
# it is a Windows batch file---which is the case in non-root
# conda environments.
- shell = self._conda_executable == 'conda' and sys.platform.startswith('win')
+ shell = self._conda_executable == "conda" and sys.platform.startswith("win")
try:
# conda info --json uses the standard JSON escaping
# mechanism for non-ASCII characters. So it is always
# valid to decode here as 'ascii', since the JSON loads()
# method will recover any original Unicode for us.
p = subprocess.check_output([self._conda_executable, "info", "--json"], shell=shell).decode("ascii")
- ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
- result = ansi_escape.sub('', p) # Remove ANSI Escape Sequences, such as ESC[0m
+ ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])")
+ result = ansi_escape.sub("", p) # Remove ANSI Escape Sequences, such as ESC[0m
conda_info = json.loads(result)
except Exception as err:
conda_info = None
@@ -155,24 +155,24 @@ def _all_envs(self):
environment names as keys, and full paths as values.
"""
conda_info = self._conda_info
- envs = conda_info['envs']
- base_prefix = conda_info['conda_prefix']
- envs_prefix = join(base_prefix, 'envs')
- build_prefix = join(base_prefix, 'conda-bld', '')
+ envs = conda_info["envs"]
+ base_prefix = conda_info["conda_prefix"]
+ envs_prefix = join(base_prefix, "envs")
+ build_prefix = join(base_prefix, "conda-bld", "")
# Older versions of conda do not seem to include the base prefix
# in the environment list, but we do want to scan that
if base_prefix not in envs:
envs.insert(0, base_prefix)
- envs_dirs = conda_info['envs_dirs']
+ envs_dirs = conda_info["envs_dirs"]
if not envs_dirs:
- envs_dirs = [join(base_prefix, 'envs')]
+ envs_dirs = [join(base_prefix, "envs")]
all_envs = {}
for env_path in envs:
if self.env_filter is not None:
if self._env_filter_regex.search(env_path):
continue
if env_path == base_prefix:
- env_name = 'root'
+ env_name = "root"
elif env_path.startswith(build_prefix):
# Skip the conda-bld directory entirely
continue
@@ -183,13 +183,13 @@ def _all_envs(self):
# directory named 'envs' is a collection of environments
# as created by, say, conda or anaconda-project. The name
# of the parent directory, then, provides useful context.
- if basename(env_base) == 'envs' and (env_base != envs_prefix or env_name in all_envs):
- env_name = u'{}-{}'.format(basename(dirname(env_base)), env_name)
+ if basename(env_base) == "envs" and (env_base != envs_prefix or env_name in all_envs):
+ env_name = "{}-{}".format(basename(dirname(env_base)), env_name)
# Further disambiguate, if necessary, with a counter.
if env_name in all_envs:
base_name = env_name
for count in range(len(all_envs)):
- env_name = u'{}-{}'.format(base_name, count + 2)
+ env_name = "{}-{}".format(base_name, count + 2)
if env_name not in all_envs:
break
all_envs[env_name] = env_path
@@ -209,16 +209,16 @@ def _all_specs(self):
all_specs = {}
# We need to be able to find conda-run in the base conda environment
# even if this package is not running there
- conda_prefix = self._conda_info['conda_prefix']
+ conda_prefix = self._conda_info["conda_prefix"]
all_envs = self._all_envs()
for env_name, env_path in all_envs.items():
- kspec_base = join(env_path, 'share', 'jupyter', 'kernels')
- kspec_glob = glob.glob(join(kspec_base, '*', 'kernel.json'))
+ kspec_base = join(env_path, "share", "jupyter", "kernels")
+ kspec_glob = glob.glob(join(kspec_base, "*", "kernel.json"))
for spec_path in kspec_glob:
try:
- with open(spec_path, 'rb') as fp:
+ with open(spec_path, "rb") as fp:
data = fp.read()
- spec = json.loads(data.decode('utf-8'))
+ spec = json.loads(data.decode("utf-8"))
except Exception as err:
self.log.error("[nb_conda_kernels] error loading %s:\n%s", spec_path, err)
continue
@@ -231,35 +231,35 @@ def _all_specs(self):
# the naming convention is as close as possible to the previous
# versions of this package; particularly so that the tests
# pass without change.
- if kernel_name in ('python2', 'python3'):
- kernel_name = 'py'
- elif kernel_name == 'ir':
- kernel_name = 'r'
- kernel_prefix = '' if env_name == 'root' else 'env-'
- kernel_name = u'conda-{}{}-{}'.format(kernel_prefix, env_name, kernel_name)
+ if kernel_name in ("python2", "python3"):
+ kernel_name = "py"
+ elif kernel_name == "ir":
+ kernel_name = "r"
+ kernel_prefix = "" if env_name == "root" else "env-"
+ kernel_name = "conda-{}{}-{}".format(kernel_prefix, env_name, kernel_name)
# Replace invalid characters with dashes
kernel_name = self.clean_kernel_name(kernel_name)
- display_prefix = spec['display_name']
- if display_prefix.startswith('Python'):
- display_prefix = 'Python'
+ display_prefix = spec["display_name"]
+ if display_prefix.startswith("Python"):
+ display_prefix = "Python"
display_name = self.name_format.format(
display_prefix,
env_name,
conda_kernel=kernel_name,
- display_name=spec['display_name'],
+ display_name=spec["display_name"],
environment=env_name,
kernel=raw_kernel_name,
language=display_prefix,
)
if env_path == sys.prefix:
- display_name += ' *'
- spec['display_name'] = display_name
+ display_name += " *"
+ spec["display_name"] = display_name
if env_path != sys.prefix:
- spec['argv'] = RUNNER_COMMAND + [conda_prefix, env_path] + spec['argv']
- metadata = spec.get('metadata', {})
- metadata.update({'conda_env_name': env_name, 'conda_env_path': env_path})
- spec['metadata'] = metadata
+ spec["argv"] = RUNNER_COMMAND + [conda_prefix, env_path] + spec["argv"]
+ metadata = spec.get("metadata", {})
+ metadata.update({"conda_env_name": env_name, "conda_env_path": env_path})
+ spec["metadata"] = metadata
if self.kernelspec_path is not None:
# Install the kernel spec
@@ -271,16 +271,16 @@ def _all_specs(self):
kernel_spec = join(destination, "kernel.json")
tmp_spec = spec.copy()
if env_path == sys.prefix: # Add the conda runner to the installed kernel spec
- tmp_spec['argv'] = RUNNER_COMMAND + [conda_prefix, env_path] + spec['argv']
+ tmp_spec["argv"] = RUNNER_COMMAND + [conda_prefix, env_path] + spec["argv"]
with open(kernel_spec, "w") as f:
json.dump(tmp_spec, f)
except OSError as error:
self.log.warning(
- u"[nb_conda_kernels] Fail to install kernel '{}'.".format(kernel_dir), exc_info=error
+ "[nb_conda_kernels] Fail to install kernel '{}'.".format(kernel_dir), exc_info=error
)
# resource_dir is not part of the spec file, so it is added at the latest time
- spec['resource_dir'] = abspath(kernel_dir)
+ spec["resource_dir"] = abspath(kernel_dir)
all_specs[kernel_name] = spec
@@ -363,7 +363,7 @@ def get_all_specs(self):
for name, resource_dir in self.find_kernel_specs().items():
try:
spec = self.get_kernel_spec(name)
- res[name] = {'resource_dir': resource_dir, 'spec': spec.to_dict()}
+ res[name] = {"resource_dir": resource_dir, "spec": spec.to_dict()}
except NoSuchKernel:
self.log.warning("Error loading kernelspec %r", name, exc_info=True)
return res
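
The clean_kernel_name() hunk above is self-contained and easy to exercise on
its own; a standalone sketch of the same two steps (NFKD folding, then
underscore substitution), with an invented sample name:

    import re
    import unicodedata

    def clean_kernel_name(kname):
        # Fold accented characters to unaccented equivalents...
        nfkd_form = unicodedata.normalize("NFKD", kname)
        kname = "".join(c for c in nfkd_form if not unicodedata.combining(c))
        # ...then replace anything outside [a-zA-Z0-9._-] with underscores.
        return re.sub(r"[^a-zA-Z0-9._\-]", "_", kname)

    print(clean_kernel_name("conda-env-été kernel"))  # conda-env-ete_kernel
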
diff --git a/spine_engine/execution_managers/conda_kernel_spec_runner.py b/spine_engine/execution_managers/conda_kernel_spec_runner.py
index 6605ae8d..1a08bf75 100644
--- a/spine_engine/execution_managers/conda_kernel_spec_runner.py
+++ b/spine_engine/execution_managers/conda_kernel_spec_runner.py
@@ -18,29 +18,29 @@ def exec_in_env(conda_prefix, env_path, *command):
# Run the standard conda activation script, and print the
# resulting environment variables to stdout for reading.
is_current_env = env_path == sys.prefix
- if sys.platform.startswith('win'):
+ if sys.platform.startswith("win"):
if is_current_env:
subprocess.Popen(list(command)).wait()
else:
- activate = os.path.join(conda_prefix, 'Scripts', 'activate.bat')
+ activate = os.path.join(conda_prefix, "Scripts", "activate.bat")
ecomm = [
- os.environ['COMSPEC'],
- '/S',
- '/U',
- '/C',
- '@echo',
- 'off',
- '&&',
- 'chcp',
- '65001',
- '&&',
- 'call',
+ os.environ["COMSPEC"],
+ "/S",
+ "/U",
+ "/C",
+ "@echo",
+ "off",
+ "&&",
+ "chcp",
+ "65001",
+ "&&",
+ "call",
activate,
env_path,
- '&&',
- '@echo',
- 'CONDA_PREFIX=%CONDA_PREFIX%',
- '&&',
+ "&&",
+ "@echo",
+ "CONDA_PREFIX=%CONDA_PREFIX%",
+ "&&",
] + list(command)
subprocess.Popen(ecomm).wait()
else:
@@ -48,13 +48,13 @@ def exec_in_env(conda_prefix, env_path, *command):
if is_current_env:
os.execvp(quoted_command[0], quoted_command)
else:
- activate = os.path.join(conda_prefix, 'bin', 'activate')
+ activate = os.path.join(conda_prefix, "bin", "activate")
ecomm = ". '{}' '{}' && echo CONDA_PREFIX=$CONDA_PREFIX && exec {}".format(
- activate, env_path, ' '.join(quoted_command)
+ activate, env_path, " ".join(quoted_command)
)
- ecomm = ['sh' if 'bsd' in sys.platform else 'bash', '-c', ecomm]
+ ecomm = ["sh" if "bsd" in sys.platform else "bash", "-c", ecomm]
os.execvp(ecomm[0], ecomm)
-if __name__ == '__main__':
+if __name__ == "__main__":
exec_in_env(*(sys.argv[1:]))
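
For context, the non-Windows branch above builds a shell one-liner that
sources conda's activate script, reports CONDA_PREFIX, and then execs the
kernel command in place. A POSIX-only sketch of the same pattern; both paths
are hypothetical:

    import os

    conda_prefix = "/opt/conda"        # hypothetical base install
    env_path = "/opt/conda/envs/demo"  # hypothetical target environment
    activate = os.path.join(conda_prefix, "bin", "activate")
    ecomm = ". '{}' '{}' && echo CONDA_PREFIX=$CONDA_PREFIX && exec python -V".format(activate, env_path)
    os.execvp("bash", ["bash", "-c", ecomm])  # replaces the current process
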
diff --git a/spine_engine/execution_managers/kernel_execution_manager.py b/spine_engine/execution_managers/kernel_execution_manager.py
index 0bef1614..4701a03d 100644
--- a/spine_engine/execution_managers/kernel_execution_manager.py
+++ b/spine_engine/execution_managers/kernel_execution_manager.py
@@ -286,8 +286,8 @@ def __init__(
self._msg_head = dict(kernel_name=kernel_name)
self._commands = commands
self._cmd_failed = False
- self.std_out = kwargs["stdout"] = open(os.devnull, 'w')
- self.std_err = kwargs["stderr"] = open(os.devnull, 'w')
+ self.std_out = kwargs["stdout"] = open(os.devnull, "w")
+ self.std_err = kwargs["stderr"] = open(os.devnull, "w")
# Don't show console when frozen
kwargs["creationflags"] = subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0
self._kernel_manager = _kernel_manager_factory.new_kernel_manager(
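
The creationflags guard above is worth noting: subprocess.CREATE_NO_WINDOW
exists only on Windows, but since a conditional expression evaluates its
condition first, the attribute is never touched on other platforms. A portable
standalone sketch:

    import os
    import subprocess
    import sys

    kwargs = {"stdout": open(os.devnull, "w"), "stderr": open(os.devnull, "w")}
    # CREATE_NO_WINDOW is Windows-only; the platform check keeps this portable.
    kwargs["creationflags"] = subprocess.CREATE_NO_WINDOW if sys.platform == "win32" else 0
    subprocess.Popen([sys.executable, "-c", "print('hi')"], **kwargs).wait()
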
diff --git a/spine_engine/execution_managers/persistent_execution_manager.py b/spine_engine/execution_managers/persistent_execution_manager.py
index dc25bda7..57f86a62 100644
--- a/spine_engine/execution_managers/persistent_execution_manager.py
+++ b/spine_engine/execution_managers/persistent_execution_manager.py
@@ -147,7 +147,7 @@ def _start_persistent(self):
def _log_stdout(self):
"""Puts stdout from the process into the queue (it will be consumed by issue_command())."""
try:
- for line in iter(self._persistent.stdout.readline, b''):
+ for line in iter(self._persistent.stdout.readline, b""):
data = line.decode("UTF8", "replace").rstrip()
self._msg_queue.put(dict(type="stdout", data=data))
except ValueError:
@@ -156,7 +156,7 @@ def _log_stdout(self):
def _log_stderr(self):
"""Puts stderr from the process into the queue (it will be consumed by issue_command())."""
try:
- for line in iter(self._persistent.stderr.readline, b''):
+ for line in iter(self._persistent.stderr.readline, b""):
data = line.decode("UTF8", "replace").rstrip()
self._msg_queue.put(dict(type="stderr", data=data))
except ValueError:
@@ -337,8 +337,8 @@ def _communicate(self, request, *args, receive=True):
"""
if not self.is_persistent_alive():
raise PersistentIsDead()
- req_args_sep = '\u001f' # Unit separator
- args_sep = '\u0091' # Private Use 1
+ req_args_sep = "\u001f" # Unit separator
+ args_sep = "\u0091" # Private Use 1
args = args_sep.join(args)
msg = f"{request}{req_args_sep}{args}"
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
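
The two control characters above form a tiny framing protocol: the request
name is separated from its argument list by U+001F, and the arguments from
each other by U+0091. A minimal encode/decode sketch mirroring _communicate()
here and the spine_repl handler further down:

    REQ_ARGS_SEP = "\u001f"  # Unit separator
    ARGS_SEP = "\u0091"      # Private Use One

    def encode(request, *args):
        return f"{request}{REQ_ARGS_SEP}{ARGS_SEP.join(args)}"

    def decode(msg):
        request, args = msg.split(REQ_ARGS_SEP)
        return request, args.split(ARGS_SEP)

    print(decode(encode("issue_command", "x = 1", "print(x)")))
    # -> ('issue_command', ['x = 1', 'print(x)'])
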
diff --git a/spine_engine/execution_managers/process_execution_manager.py b/spine_engine/execution_managers/process_execution_manager.py
index 82ae9f3e..e2be5750 100644
--- a/spine_engine/execution_managers/process_execution_manager.py
+++ b/spine_engine/execution_managers/process_execution_manager.py
@@ -71,14 +71,14 @@ def stop_execution(self):
self._process.terminate()
def _log_stdout(self, stdout):
- for line in iter(stdout.readline, b''):
+ for line in iter(stdout.readline, b""):
line = line.decode("UTF8", "replace").strip()
self._logger.msg_proc.emit(line)
self._logger.msg_standard_execution.emit({"type": "stdout", "data": line})
stdout.close()
def _log_stderr(self, stderr):
- for line in iter(stderr.readline, b''):
+ for line in iter(stderr.readline, b""):
line = line.decode("UTF8", "replace").strip()
self._logger.msg_proc_error.emit(line)
self._logger.msg_standard_execution.emit({"type": "stderr", "data": line})
diff --git a/spine_engine/execution_managers/spine_repl.py b/spine_engine/execution_managers/spine_repl.py
index 3c6dc463..bdc5be17 100644
--- a/spine_engine/execution_managers/spine_repl.py
+++ b/spine_engine/execution_managers/spine_repl.py
@@ -32,8 +32,8 @@ class SpineDBServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
class _RequestHandler(socketserver.BaseRequestHandler):
def handle(self):
data = self.request.recv(1024).decode("UTF8")
- req_args_sep = '\u001f' # Unit separator
- args_sep = '\u0091' # Private Use 1
+ req_args_sep = "\u001f" # Unit separator
+ args_sep = "\u0091" # Private Use 1
request, args = data.split(req_args_sep)
args = args.split(args_sep)
handler = {
diff --git a/spine_engine/server/engine_server.py b/spine_engine/server/engine_server.py
index f44658f9..21c9c706 100644
--- a/spine_engine/server/engine_server.py
+++ b/spine_engine/server/engine_server.py
@@ -57,9 +57,9 @@ def __init__(self, protocol, port, sec_model, sec_folder):
if not sec_folder:
raise ValueError("Path to security folder missing")
base_dir = sec_folder
- self.keys_dir = os.path.join(base_dir, 'certificates')
- self.public_keys_dir = os.path.join(base_dir, 'public_keys')
- self.secret_keys_dir = os.path.join(base_dir, 'private_keys')
+ self.keys_dir = os.path.join(base_dir, "certificates")
+ self.public_keys_dir = os.path.join(base_dir, "public_keys")
+ self.secret_keys_dir = os.path.join(base_dir, "private_keys")
if not os.path.exists(self.keys_dir):
raise ValueError(f"Security folder: {self.keys_dir} does not exist")
if not os.path.exists(self.public_keys_dir):
@@ -338,7 +338,7 @@ def enable_stonehouse_security(self, frontend):
allowed_str = "\n".join(allowed)
print(f"StoneHouse security activated. Allowed endpoints ({len(allowed)}):\n{allowed_str}")
# Tell the authenticator how to handle CURVE requests
- auth.configure_curve(domain='*', location=zmq.auth.CURVE_ALLOW_ANY)
+ auth.configure_curve(domain="*", location=zmq.auth.CURVE_ALLOW_ANY)
server_secret_file = os.path.join(self.secret_keys_dir, "server.key_secret")
server_public, server_secret = zmq.auth.load_certificate(server_secret_file)
frontend.curve_secretkey = server_secret
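
For reference, a hedged sketch of the stonehouse (CURVE) wiring this hunk
touches, using pyzmq's public auth API; the socket type, endpoint, and key
path are illustrative assumptions, not values taken from this repo:

    import os
    import zmq
    import zmq.auth
    from zmq.auth.thread import ThreadAuthenticator

    ctx = zmq.Context.instance()
    auth = ThreadAuthenticator(ctx)
    auth.start()
    auth.configure_curve(domain="*", location=zmq.auth.CURVE_ALLOW_ANY)

    frontend = ctx.socket(zmq.ROUTER)  # socket type assumed for illustration
    secret_file = os.path.join("private_keys", "server.key_secret")  # hypothetical
    server_public, server_secret = zmq.auth.load_certificate(secret_file)
    frontend.curve_secretkey = server_secret
    frontend.curve_publickey = server_public
    frontend.curve_server = True
    frontend.bind("tcp://*:50001")
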
diff --git a/spine_engine/server/persistent_execution_service.py b/spine_engine/server/persistent_execution_service.py
index a1943c03..9178ce73 100644
--- a/spine_engine/server/persistent_execution_service.py
+++ b/spine_engine/server/persistent_execution_service.py
@@ -51,7 +51,7 @@ def run(self):
for msg in pm.issue_command(cmd, add_history=True, catch_exception=False):
json_msg = json.dumps(msg)
self.push_socket.send(json_msg.encode("utf-8")) # This blocks until somebody is pulling (receiving)
- self.push_socket.send(b'END')
+ self.push_socket.send(b"END")
retval_tuple = cmd_type, "ok"
elif cmd_type == "get_completions":
retval = pm.get_completions(cmd)
@@ -65,7 +65,7 @@ def run(self):
for msg in pm.restart_persistent():
json_msg = json.dumps(msg)
self.push_socket.send(json_msg.encode("utf-8"))
- self.push_socket.send(b'END')
+ self.push_socket.send(b"END")
retval_tuple = cmd_type, "ok"
elif cmd_type == "interrupt_persistent":
pm.interrupt_persistent()
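
The push socket above streams one JSON-encoded message per send and then a
literal b"END" sentinel so the receiving end knows the stream is complete. A
minimal self-contained sketch of both sides (endpoint chosen at random):

    import json
    import zmq

    ctx = zmq.Context.instance()
    push = ctx.socket(zmq.PUSH)
    port = push.bind_to_random_port("tcp://127.0.0.1")
    pull = ctx.socket(zmq.PULL)  # stand-in for the remote client
    pull.connect(f"tcp://127.0.0.1:{port}")

    push.send(json.dumps({"type": "stdout", "data": "hello"}).encode("utf-8"))
    push.send(b"END")  # sentinel: no more messages for this command

    while (msg := pull.recv()) != b"END":
        print(json.loads(msg))
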
diff --git a/spine_engine/server/util/server_message.py b/spine_engine/server/util/server_message.py
index 055be8e7..d934e90a 100644
--- a/spine_engine/server/util/server_message.py
+++ b/spine_engine/server/util/server_message.py
@@ -72,14 +72,14 @@ def toJSON(self):
jsonFileNames = self._getJSONFileNames()
retStr = ""
retStr += "{\n"
- retStr += " \"command\": \"" + self._command + "\",\n"
- retStr += " \"id\":\"" + self._id + "\",\n"
+ retStr += ' "command": "' + self._command + '",\n'
+ retStr += ' "id":"' + self._id + '",\n'
if len(self._data) == 0:
- retStr += " \"data\":\"\",\n"
+ retStr += ' "data":"",\n'
else:
- retStr += " \"data\":" + self._data + ",\n"
- retStr += " \"files\": " + jsonFileNames
+ retStr += ' "data":' + self._data + ",\n"
+ retStr += ' "files": ' + jsonFileNames
retStr += "}"
return retStr
@@ -87,13 +87,13 @@ def _getJSONFileNames(self):
fileNameCount = len(self._files)
if fileNameCount == 0:
return "{}\n"
- retStr = '{\n'
+ retStr = "{\n"
i = 0
for fName in self._files:
if i + 1 < fileNameCount:
- retStr = retStr + " \"name-" + str(i) + "\": \"" + fName + "\",\n"
+ retStr = retStr + ' "name-' + str(i) + '": "' + fName + '",\n'
else:
- retStr = retStr + " \"name-" + str(i) + "\": \"" + fName + "\"\n"
+ retStr = retStr + ' "name-' + str(i) + '": "' + fName + '"\n'
i += 1
retStr = retStr + " }\n"
return retStr
@@ -124,7 +124,7 @@ def parse(cls, message):
if len(filenames) > 0:
for f in filenames:
parsed_filenames.append(filenames[f])
- msg = cls(parsed_msg['command'], parsed_msg['id'], data, parsed_filenames)
+ msg = cls(parsed_msg["command"], parsed_msg["id"], data, parsed_filenames)
else:
- msg = cls(parsed_msg['command'], parsed_msg['id'], data, None)
+ msg = cls(parsed_msg["command"], parsed_msg["id"], data, None)
return msg
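
Not part of the patch, but useful for checking the hand-built JSON above: an
equivalent of toJSON() via json.dumps, assuming (as the original does) that
self._data is already a raw JSON string. The output differs only in
whitespace:

    import json

    def to_json(command, msg_id, data, files):
        return json.dumps(
            {
                "command": command,
                "id": msg_id,
                # data is embedded as raw JSON upstream, so parse it first
                "data": json.loads(data) if data else "",
                "files": {"name-{}".format(i): f for i, f in enumerate(files)},
            }
        )

    print(to_json("execute", "1", '{"x": 1}', ["a.py"]))
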
diff --git a/spine_engine/spine_engine.py b/spine_engine/spine_engine.py
index 57748fd3..995cf966 100644
--- a/spine_engine/spine_engine.py
+++ b/spine_engine/spine_engine.py
@@ -334,14 +334,14 @@ def _process_event(self, event):
if event.event_type == DagsterEventType.STEP_START:
direction, _, solid_name = event.solid_name.partition("_")
item_name = self._items_by_solids[solid_name]
- self._queue.put(('exec_started', {"item_name": item_name, "direction": direction}))
+ self._queue.put(("exec_started", {"item_name": item_name, "direction": direction}))
elif event.event_type == DagsterEventType.STEP_FAILURE and self._state != SpineEngineState.USER_STOPPED:
direction, _, solid_name = event.solid_name.partition("_")
item_name = self._items_by_solids[solid_name]
self._state = SpineEngineState.FAILED
self._queue.put(
(
- 'exec_finished',
+ "exec_finished",
{
"item_name": item_name,
"direction": direction,
@@ -367,7 +367,7 @@ def _process_event(self, event):
item_finish_state = ItemExecutionFinishState.SUCCESS
self._queue.put(
(
- 'exec_finished',
+ "exec_finished",
{
"item_name": item_name,
"direction": direction,
@@ -386,7 +386,7 @@ def _process_event(self, event):
item_finish_state = ItemExecutionFinishState[state_value]
self._queue.put(
(
- 'exec_finished',
+ "exec_finished",
{
"item_name": item_name,
"direction": direction,
@@ -409,7 +409,7 @@ def _stop_item(self, item):
item.stop_execution()
self._queue.put(
(
- 'exec_finished',
+ "exec_finished",
{
"item_name": item.name,
"direction": str(ED.FORWARD),
diff --git a/tests/execution_managers/test_kernel_execution_manager.py b/tests/execution_managers/test_kernel_execution_manager.py
index f59588a2..0d79cb44 100644
--- a/tests/execution_managers/test_kernel_execution_manager.py
+++ b/tests/execution_managers/test_kernel_execution_manager.py
@@ -59,7 +59,7 @@ def test_kernel_execution_manager(self):
self.assertEqual(0, _kernel_manager_factory.n_kernel_managers())
message_emits = logger.msg_kernel_execution.emit.call_args_list
expected_msg = {"type": "execution_started", "kernel_name": NATIVE_KERNEL_NAME}
- last_expected_msg = {"type": "stdout", "data": 'hello\n'}
+ last_expected_msg = {"type": "stdout", "data": "hello\n"}
# NOTE: In GitHub unittest runner on Ubuntu Python 3.9+ there is an extra warning message in message_emits
# This makes the number of message_emits inconsistent between Pythons so
# we don't do test self.assertEqual(5, len(message_emits))
@@ -89,7 +89,7 @@ def test_kernel_execution_manager_kill_completed(self):
self.assertEqual(0, _kernel_manager_factory.n_kernel_managers())
message_emits = logger.msg_kernel_execution.emit.call_args_list
expected_msg = {"type": "execution_started", "kernel_name": NATIVE_KERNEL_NAME}
- last_expected_msg = {'type': 'kernel_shutdown', 'kernel_name': 'python3'}
+ last_expected_msg = {"type": "kernel_shutdown", "kernel_name": "python3"}
self.assertEqual(expected_msg, message_emits[1][0][0])
self.assertEqual(last_expected_msg, message_emits[-1][0][0])
@@ -130,8 +130,8 @@ def test_kernel_manager_sharing(self):
logger1_message_emits = logger1.msg_kernel_execution.emit.call_args_list
logger2_message_emits = logger2.msg_kernel_execution.emit.call_args_list
expected_msg = {"type": "execution_started", "kernel_name": NATIVE_KERNEL_NAME}
- last_logger1_expected_msg = {'type': 'stdout', 'data': 'hello\n'}
- last_logger2_expected_msg = {'type': 'stdout', 'data': 'hello again\n'}
+ last_logger1_expected_msg = {"type": "stdout", "data": "hello\n"}
+ last_logger2_expected_msg = {"type": "stdout", "data": "hello again\n"}
self.assertEqual(expected_msg, logger1_message_emits[1][0][0])
self.assertEqual(last_logger1_expected_msg, logger1_message_emits[-1][0][0])
self.assertEqual(expected_msg, logger2_message_emits[1][0][0])
@@ -174,8 +174,8 @@ def test_two_kernel_managers(self):
logger1_message_emits = logger1.msg_kernel_execution.emit.call_args_list
logger2_message_emits = logger2.msg_kernel_execution.emit.call_args_list
expected_msg = {"type": "execution_started", "kernel_name": NATIVE_KERNEL_NAME}
- last_logger1_expected_msg = {'type': 'stdout', 'data': 'hello\n'}
- last_logger2_expected_msg = {'type': 'stdout', 'data': 'hello again\n'}
+ last_logger1_expected_msg = {"type": "stdout", "data": "hello\n"}
+ last_logger2_expected_msg = {"type": "stdout", "data": "hello again\n"}
self.assertEqual(expected_msg, logger1_message_emits[1][0][0])
self.assertEqual(last_logger1_expected_msg, logger1_message_emits[-1][0][0])
self.assertEqual(expected_msg, logger2_message_emits[1][0][0])
diff --git a/tests/execution_managers/test_process_execution_manager.py b/tests/execution_managers/test_process_execution_manager.py
index c54a01e1..b64d6178 100644
--- a/tests/execution_managers/test_process_execution_manager.py
+++ b/tests/execution_managers/test_process_execution_manager.py
@@ -67,5 +67,5 @@ def test_run_with_workdir(self):
self.assertEqual(ret, 0)
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/tests/project_item/test_project_item_resource.py b/tests/project_item/test_project_item_resource.py
index 3e35d5b4..9b735473 100644
--- a/tests/project_item/test_project_item_resource.py
+++ b/tests/project_item/test_project_item_resource.py
@@ -186,5 +186,5 @@ def test_transient_file_resource(self):
self.assertEqual(get_source_extras(resource), {})
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/tests/server/util/test_EventDataConverter.py b/tests/server/util/test_EventDataConverter.py
index 958ea35a..1aea07f7 100644
--- a/tests/server/util/test_EventDataConverter.py
+++ b/tests/server/util/test_EventDataConverter.py
@@ -23,93 +23,93 @@
class TestEventDataConverter(unittest.TestCase):
def make_event_data(self):
test_events = [
- ('exec_started', {'item_name': 'helloworld', 'direction': 'BACKWARD'}),
- ('exec_started', {'item_name': 'Data Connection 1', 'direction': 'BACKWARD'}),
+ ("exec_started", {"item_name": "helloworld", "direction": "BACKWARD"}),
+ ("exec_started", {"item_name": "Data Connection 1", "direction": "BACKWARD"}),
(
- 'exec_finished',
+ "exec_finished",
{
- 'item_name': 'helloworld',
- 'direction': 'BACKWARD',
- 'state': 'RUNNING',
- 'item_state': ItemExecutionFinishState.SUCCESS,
+ "item_name": "helloworld",
+ "direction": "BACKWARD",
+ "state": "RUNNING",
+ "item_state": ItemExecutionFinishState.SUCCESS,
},
),
(
- 'exec_finished',
+ "exec_finished",
{
- 'item_name': 'Data Connection 1',
- 'direction': 'BACKWARD',
- 'state': 'RUNNING',
- 'item_state': ItemExecutionFinishState.SUCCESS,
+ "item_name": "Data Connection 1",
+ "direction": "BACKWARD",
+ "state": "RUNNING",
+ "item_state": ItemExecutionFinishState.SUCCESS,
},
),
- ('exec_started', {'item_name': 'Data Connection 1', 'direction': 'FORWARD'}),
+ ("exec_started", {"item_name": "Data Connection 1", "direction": "FORWARD"}),
(
- 'event_msg',
+ "event_msg",
{
- 'item_name': 'Data Connection 1',
- 'filter_id': '',
- 'msg_type': 'msg_success',
- 'msg_text': 'Executing Data Connection Data Connection 1 finished',
+ "item_name": "Data Connection 1",
+ "filter_id": "",
+ "msg_type": "msg_success",
+ "msg_text": "Executing Data Connection Data Connection 1 finished",
},
),
(
- 'exec_finished',
+ "exec_finished",
{
- 'item_name': 'Data Connection 1',
- 'direction': 'FORWARD',
- 'state': 'RUNNING',
- 'item_state': ItemExecutionFinishState.SUCCESS,
+ "item_name": "Data Connection 1",
+ "direction": "FORWARD",
+ "state": "RUNNING",
+ "item_state": ItemExecutionFinishState.SUCCESS,
},
),
- ('flash', {'item_name': 'from Data Connection 1 to helloworld'}),
- ('exec_started', {'item_name': 'helloworld', 'direction': 'FORWARD'}),
+ ("flash", {"item_name": "from Data Connection 1 to helloworld"}),
+ ("exec_started", {"item_name": "helloworld", "direction": "FORWARD"}),
(
- 'event_msg',
+ "event_msg",
{
- 'item_name': 'helloworld',
- 'filter_id': '',
- 'msg_type': 'msg',
- 'msg_text': "*** Executing Tool specification helloworld2 in source directory ***",
+ "item_name": "helloworld",
+ "filter_id": "",
+ "msg_type": "msg",
+ "msg_text": "*** Executing Tool specification helloworld2 in source directory ***",
},
),
(
- 'persistent_execution_msg',
+ "persistent_execution_msg",
{
- 'item_name': 'helloworld',
- 'filter_id': '',
- 'type': 'persistent_started',
- 'key': '6ceeb59271114fc2a0f787266f72dedc',
- 'language': 'python',
+ "item_name": "helloworld",
+ "filter_id": "",
+ "type": "persistent_started",
+ "key": "6ceeb59271114fc2a0f787266f72dedc",
+ "language": "python",
},
),
(
- 'persistent_execution_msg',
- {'item_name': 'helloworld', 'filter_id': '', 'type': 'stdin', 'data': '# Running python helloworld.py'},
+ "persistent_execution_msg",
+ {"item_name": "helloworld", "filter_id": "", "type": "stdin", "data": "# Running python helloworld.py"},
),
(
- 'persistent_execution_msg',
- {'item_name': 'helloworld', 'filter_id': '', 'type': 'stdout', 'data': 'helloo'},
+ "persistent_execution_msg",
+ {"item_name": "helloworld", "filter_id": "", "type": "stdout", "data": "helloo"},
),
(
- 'event_msg',
+ "event_msg",
{
- 'item_name': 'helloworld',
- 'filter_id': '',
- 'msg_type': 'msg',
- 'msg_text': "*** Archiving output files to results directory ***",
+ "item_name": "helloworld",
+ "filter_id": "",
+ "msg_type": "msg",
+ "msg_text": "*** Archiving output files to results directory ***",
},
),
(
- 'exec_finished',
+ "exec_finished",
{
- 'item_name': 'helloworld',
- 'direction': 'FORWARD',
- 'state': 'RUNNING',
- 'item_state': ItemExecutionFinishState.SUCCESS,
+ "item_name": "helloworld",
+ "direction": "FORWARD",
+ "state": "RUNNING",
+ "item_state": ItemExecutionFinishState.SUCCESS,
},
),
- ('dag_exec_finished', 'COMPLETED'),
+ ("dag_exec_finished", "COMPLETED"),
]
return test_events
@@ -146,5 +146,5 @@ def test_convert_deconvert(self):
self.assertEqual(expected_data, deconverted_events)
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/tests/server/util/test_ServerMessage.py b/tests/server/util/test_ServerMessage.py
index 176497dc..5a2a9545 100644
--- a/tests/server/util/test_ServerMessage.py
+++ b/tests/server/util/test_ServerMessage.py
@@ -22,149 +22,149 @@
class TestServerMessage(unittest.TestCase):
def make_engine_data1(self):
engine_data = {
- 'items': {
- 'T1': {
- 'type': 'Tool',
- 'description': '',
- 'x': -81.09856329675559,
- 'y': -7.8289158512399695,
- 'specification': 'a',
- 'execute_in_work': True,
- 'cmd_line_args': [],
+ "items": {
+ "T1": {
+ "type": "Tool",
+ "description": "",
+ "x": -81.09856329675559,
+ "y": -7.8289158512399695,
+ "specification": "a",
+ "execute_in_work": True,
+ "cmd_line_args": [],
},
- 'DC1': {
- 'type': 'Data Connection',
- 'description': '',
- 'x': -249.4143275244174,
- 'y': -122.2554094109619,
- 'file_references': [],
- 'db_references': [],
- 'db_credentials': {},
+ "DC1": {
+ "type": "Data Connection",
+ "description": "",
+ "x": -249.4143275244174,
+ "y": -122.2554094109619,
+ "file_references": [],
+ "db_references": [],
+ "db_credentials": {},
},
},
- 'specifications': {
- 'Tool': [
+ "specifications": {
+ "Tool": [
{
- 'name': 'a',
- 'tooltype': 'python',
- 'includes': ['a.py'],
- 'description': '',
- 'inputfiles': ['a.txt'],
- 'inputfiles_opt': [],
- 'outputfiles': [],
- 'cmdline_args': [],
- 'includes_main_path': '.',
- 'execution_settings': {
- 'env': '',
- 'kernel_spec_name': 'python38',
- 'use_jupyter_console': False,
- 'executable': '',
+ "name": "a",
+ "tooltype": "python",
+ "includes": ["a.py"],
+ "description": "",
+ "inputfiles": ["a.txt"],
+ "inputfiles_opt": [],
+ "outputfiles": [],
+ "cmdline_args": [],
+ "includes_main_path": ".",
+ "execution_settings": {
+ "env": "",
+ "kernel_spec_name": "python38",
+ "use_jupyter_console": False,
+ "executable": "",
},
- 'definition_file_path': 'C:\\Users\\ttepsa\\OneDrive - Teknologian Tutkimuskeskus VTT\\Documents\\SpineToolboxProjects\\remote test 2 dags\\.spinetoolbox\\specifications\\Tool\\a.json',
+ "definition_file_path": "C:\\Users\\ttepsa\\OneDrive - Teknologian Tutkimuskeskus VTT\\Documents\\SpineToolboxProjects\\remote test 2 dags\\.spinetoolbox\\specifications\\Tool\\a.json",
}
]
},
- 'connections': [{'name': 'from DC1 to T1', 'from': ['DC1', 'right'], 'to': ['T1', 'left']}],
- 'jumps': [],
- 'execution_permits': {'T1': True, 'DC1': True},
- 'items_module_name': 'spine_items',
- 'settings': {
- 'engineSettings/remoteExecutionEnabled': 'true',
- 'engineSettings/remoteHost': '192.168.56.69',
- 'engineSettings/remotePort': 50001,
- 'engineSettings/remoteSecurityFolder': '',
- 'engineSettings/remoteSecurityModel': '',
+ "connections": [{"name": "from DC1 to T1", "from": ["DC1", "right"], "to": ["T1", "left"]}],
+ "jumps": [],
+ "execution_permits": {"T1": True, "DC1": True},
+ "items_module_name": "spine_items",
+ "settings": {
+ "engineSettings/remoteExecutionEnabled": "true",
+ "engineSettings/remoteHost": "192.168.56.69",
+ "engineSettings/remotePort": 50001,
+ "engineSettings/remoteSecurityFolder": "",
+ "engineSettings/remoteSecurityModel": "",
},
- 'project_dir': 'C:/Users/ttepsa/OneDrive - Teknologian Tutkimuskeskus VTT/Documents/SpineToolboxProjects/remote test 2 dags',
+ "project_dir": "C:/Users/ttepsa/OneDrive - Teknologian Tutkimuskeskus VTT/Documents/SpineToolboxProjects/remote test 2 dags",
}
return engine_data
def make_engine_data2(self):
engine_data = {
- 'items': {
- 'Importer 1': {
- 'type': 'Importer',
- 'description': '',
- 'x': 72.45758726309028,
- 'y': -80.20321040425301,
- 'specification': 'Importer 1 - pekka_units.xlsx - 0',
- 'cancel_on_error': True,
- 'purge_before_writing': True,
- 'on_conflict': 'replace',
- 'file_selection': [['/pekka_units.xlsx', True]],
+ "items": {
+ "Importer 1": {
+ "type": "Importer",
+ "description": "",
+ "x": 72.45758726309028,
+ "y": -80.20321040425301,
+ "specification": "Importer 1 - pekka_units.xlsx - 0",
+ "cancel_on_error": True,
+ "purge_before_writing": True,
+ "on_conflict": "replace",
+ "file_selection": [["/pekka_units.xlsx", True]],
},
- 'DS1': {
- 'type': 'Data Store',
- 'description': '',
- 'x': 211.34193262411353,
- 'y': -152.99750295508272,
- 'url': {
- 'dialect': 'sqlite',
- 'username': '',
- 'password': '',
- 'host': '',
- 'port': '',
- 'database': {'type': 'path', 'relative': True, 'path': '.spinetoolbox/items/ds1/DS1.sqlite'},
+ "DS1": {
+ "type": "Data Store",
+ "description": "",
+ "x": 211.34193262411353,
+ "y": -152.99750295508272,
+ "url": {
+ "dialect": "sqlite",
+ "username": "",
+ "password": "",
+ "host": "",
+ "port": "",
+ "database": {"type": "path", "relative": True, "path": ".spinetoolbox/items/ds1/DS1.sqlite"},
},
},
- 'pekka data': {
- 'type': 'Data Connection',
- 'description': '',
- 'x': -78.23453014184398,
- 'y': -145.98354018912534,
- 'file_references': [],
- 'db_references': [],
- 'db_credentials': {},
+ "pekka data": {
+ "type": "Data Connection",
+ "description": "",
+ "x": -78.23453014184398,
+ "y": -145.98354018912534,
+ "file_references": [],
+ "db_references": [],
+ "db_credentials": {},
},
},
- 'specifications': {
- 'Importer': [
+ "specifications": {
+ "Importer": [
{
- 'name': 'Importer 1 - pekka_units.xlsx - 0',
- 'item_type': 'Importer',
- 'mapping': {
- 'table_mappings': {
- 'Sheet1': [
+ "name": "Importer 1 - pekka_units.xlsx - 0",
+ "item_type": "Importer",
+ "mapping": {
+ "table_mappings": {
+ "Sheet1": [
{
- 'map_type': 'ObjectClass',
- 'name': {'map_type': 'column', 'reference': 0},
- 'parameters': {'map_type': 'None'},
- 'skip_columns': [],
- 'read_start_row': 0,
- 'objects': {'map_type': 'column', 'reference': 1},
+ "map_type": "ObjectClass",
+ "name": {"map_type": "column", "reference": 0},
+ "parameters": {"map_type": "None"},
+ "skip_columns": [],
+ "read_start_row": 0,
+ "objects": {"map_type": "column", "reference": 1},
}
]
},
- 'table_options': {},
- 'table_types': {'Sheet1': {'0': 'string', '1': 'string'}},
- 'table_row_types': {},
- 'selected_tables': ['Sheet1'],
- 'source_type': 'ExcelConnector',
+ "table_options": {},
+ "table_types": {"Sheet1": {"0": "string", "1": "string"}},
+ "table_row_types": {},
+ "selected_tables": ["Sheet1"],
+ "source_type": "ExcelConnector",
},
- 'description': None,
- 'definition_file_path': 'C:\\Users\\ttepsa\\OneDrive - Teknologian Tutkimuskeskus VTT\\Documents\\SpineToolboxProjects\\Simple Importer\\Importer 1 - pekka_units.xlsx - 0.json',
+ "description": None,
+ "definition_file_path": "C:\\Users\\ttepsa\\OneDrive - Teknologian Tutkimuskeskus VTT\\Documents\\SpineToolboxProjects\\Simple Importer\\Importer 1 - pekka_units.xlsx - 0.json",
}
]
},
- 'connections': [
+ "connections": [
{
- 'name': 'from pekka data to Importer 1',
- 'from': ['pekka data', 'right'],
- 'to': ['Importer 1', 'left'],
+ "name": "from pekka data to Importer 1",
+ "from": ["pekka data", "right"],
+ "to": ["Importer 1", "left"],
},
- {'name': 'from Importer 1 to DS1', 'from': ['Importer 1', 'right'], 'to': ['DS1', 'left']},
+ {"name": "from Importer 1 to DS1", "from": ["Importer 1", "right"], "to": ["DS1", "left"]},
],
- 'jumps': [],
- 'execution_permits': {'Importer 1': True, 'DS1': True, 'pekka data': True},
- 'items_module_name': 'spine_items',
- 'settings': {
- 'engineSettings/remoteExecutionEnabled': 'true',
- 'engineSettings/remoteHost': '192.168.56.69',
- 'engineSettings/remotePort': 50001,
- 'engineSettings/remoteSecurityFolder': '',
- 'engineSettings/remoteSecurityModel': '',
+ "jumps": [],
+ "execution_permits": {"Importer 1": True, "DS1": True, "pekka data": True},
+ "items_module_name": "spine_items",
+ "settings": {
+ "engineSettings/remoteExecutionEnabled": "true",
+ "engineSettings/remoteHost": "192.168.56.69",
+ "engineSettings/remotePort": 50001,
+ "engineSettings/remoteSecurityFolder": "",
+ "engineSettings/remoteSecurityModel": "",
},
- 'project_dir': 'C:/Users/ttepsa/OneDrive - Teknologian Tutkimuskeskus VTT/Documents/SpineToolboxProjects/Simple Importer',
+ "project_dir": "C:/Users/ttepsa/OneDrive - Teknologian Tutkimuskeskus VTT/Documents/SpineToolboxProjects/Simple Importer",
}
return engine_data
@@ -226,5 +226,5 @@ def test_msg_nofiles(self):
self.assertEqual(len(msg.getFileNames()), 0)
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/tests/test_load_project_items.py b/tests/test_load_project_items.py
index 4df24798..f3486aa1 100644
--- a/tests/test_load_project_items.py
+++ b/tests/test_load_project_items.py
@@ -42,5 +42,5 @@ def test_load_item_specification_factories(self):
self.assertTrue(issubclass(factories["TestItem"], ProjectItemSpecificationFactory))
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/tests/test_spine_engine.py b/tests/test_spine_engine.py
index 03665d16..3bd7b83e 100644
--- a/tests/test_spine_engine.py
+++ b/tests/test_spine_engine.py
@@ -66,7 +66,7 @@ def _mock_item(
resources_backward = []
item = NonCallableMagicMock()
item.name = name
- item.short_name = name.lower().replace(' ', '_')
+ item.short_name = name.lower().replace(" ", "_")
item.execute.return_value = execute_outcome
item.exclude_execution = MagicMock()
for r in resources_forward + resources_backward:
@@ -81,7 +81,7 @@ def _mock_item(
def _default_forward_url_resource(url, predecessor_name):
resource = _make_url_resource(url)
resource.provider_name = predecessor_name
- resource.metadata = {'filter_id': '', 'filter_stack': ()}
+ resource.metadata = {"filter_id": "", "filter_stack": ()}
return resource
@staticmethod
@@ -853,5 +853,5 @@ def test_bug(self):
self.fail("validate_single_jump shouldn't have raised")
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py
index a64239dd..9e8c48b7 100644
--- a/tests/utils/test_helpers.py
+++ b/tests/utils/test_helpers.py
@@ -188,5 +188,5 @@ def test_get_file_size(self):
self.assertEqual(expected_output, output)
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()
diff --git a/tests/utils/test_serialization.py b/tests/utils/test_serialization.py
index 0028f80d..e635ebab 100644
--- a/tests/utils/test_serialization.py
+++ b/tests/utils/test_serialization.py
@@ -144,5 +144,5 @@ def test_unix_paths(self):
self.assertFalse(path_in_dir("/path/to/my/file.dat", "/another/path"))
-if __name__ == '__main__':
+if __name__ == "__main__":
unittest.main()