From f40e4c25734e00e7f56e544e99f4c9ac5c01c9eb Mon Sep 17 00:00:00 2001
From: Alexander Goscinski
Date: Mon, 9 Dec 2024 15:54:33 +0100
Subject: [PATCH] fix tests

- adapt output names of files from statistics.py
- expand environment variables in values from the yaml file; this might need
  to be removed because it could cause problems with codes that are defined
  on HPC
- change labels a bit for tasks; TODO: do the same for data
---
 src/sirocco/parsing/_yaml_data_models.py       | 7 +++++++
 src/sirocco/workgraph.py                       | 7 ++++---
 tests/files/configs/test_config_parameters.yml | 9 +++++----
 tests/files/scripts/icon.py                    | 2 +-
 4 files changed, 17 insertions(+), 8 deletions(-)

diff --git a/src/sirocco/parsing/_yaml_data_models.py b/src/sirocco/parsing/_yaml_data_models.py
index 9ec7db8..00ecb01 100644
--- a/src/sirocco/parsing/_yaml_data_models.py
+++ b/src/sirocco/parsing/_yaml_data_models.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+from os.path import expandvars
 import time
 from datetime import datetime
 from pathlib import Path
@@ -273,6 +274,12 @@ class ConfigTaskShell(ConfigTaskBase):
     input_arg_options: dict[str, str] = Field(default_factory=dict)
     src: str | None = None
 
+    @field_validator("command", "command_option", "src")
+    @classmethod
+    def expand_var(cls, value: str | None) -> str | None:
+        """Expand environment variables."""
+        # TODO: this might not be intended if we want to use environment variables on remote HPC
+        return None if value is None else expandvars(value)
 
 class ConfigTaskIcon(ConfigTaskBase):
     plugin: Literal["icon"]
diff --git a/src/sirocco/workgraph.py b/src/sirocco/workgraph.py
index bfa19b7..693c299 100644
--- a/src/sirocco/workgraph.py
+++ b/src/sirocco/workgraph.py
@@ -126,7 +126,7 @@ def parse_to_aiida_label(label: str) -> str:
     def get_aiida_label_from_unrolled_data(obj: core.BaseNode) -> str:
         """ """
         return AiidaWorkGraph.parse_to_aiida_label(
-            f"{obj.name}" + "_".join(f"_{key}_{value}" for key, value in obj.coordinates.items())
+            f"{obj.name}" + "__".join(f"_{key}_{value}" for key, value in obj.coordinates.items())
         )
 
     @staticmethod
@@ -136,9 +136,10 @@ def get_aiida_label_from_unrolled_task(obj: core.BaseNode) -> str:
         # so do we check somewhere that a task is not used in multiple cycles?
         # Otherwise the label is not unique
         # --> task name + date + parameters
-        return AiidaWorkGraph.parse_to_aiida_label(
-            f"{obj.name}" + "_".join(f"_{key}_{value}" for key, value in obj.coordinates.items())
+        label = AiidaWorkGraph.parse_to_aiida_label(
+            "__".join([f"{obj.name}"] + [f"_{key}_{value}" for key, value in obj.coordinates.items()])
         )
+        return label
 
     def _add_aiida_input_data_node(self, input_: core.UnrolledData):
         """
diff --git a/tests/files/configs/test_config_parameters.yml b/tests/files/configs/test_config_parameters.yml
index 82f57a8..86f4315 100644
--- a/tests/files/configs/test_config_parameters.yml
+++ b/tests/files/configs/test_config_parameters.yml
@@ -26,6 +26,7 @@ cycles:
             inputs:
               - icon_output:
                   parameters:
+                    # PR COMMENT: Is it intended that statistics.py takes multiple files here? It gets the icon_output for all the foo values.
                     bar: single
             outputs: [analysis_foo]
         - statistics_foo_bar:
@@ -73,7 +74,7 @@ data:
   generated:
     - icon_output:
         type: file
-        src: output
+        src: icon_output
         parameters: [foo, bar]
     - icon_restart:
         type: file
@@ -81,14 +82,14 @@ data:
         parameters: [foo, bar]
     - analysis_foo:
         type: file
-        src: analysis_foo
+        src: analysis
         parameters: [bar]
     - analysis_foo_bar:
        type: file
-        src: foo_analysis_bar
+        src: analysis
     - yearly_analysis:
         type: file
-        src: yearly_analysis
+        src: analysis
 
 parameters:
   foo: [0, 1, 2]
diff --git a/tests/files/scripts/icon.py b/tests/files/scripts/icon.py
index 7ec029b..ded4876 100755
--- a/tests/files/scripts/icon.py
+++ b/tests/files/scripts/icon.py
@@ -31,7 +31,7 @@ def main():
 
     args = parser.parse_args()
 
-    output = Path('output')
+    output = Path('icon_output')
     output.write_text("")
 
     if args.restart and args.init:
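
Not part of the patch: a minimal standalone sketch of the two behaviours the
patch introduces, namely expanding environment variables in shell-task fields
through a pydantic field_validator, and building labels by joining the object
name and its coordinate pairs with a double underscore. The names
ShellTaskSketch and build_label are illustrative only, not Sirocco's API; the
real code also passes the joined string through
AiidaWorkGraph.parse_to_aiida_label, which is not reproduced here. Assumes
pydantic v2 and Python 3.10+.

from os.path import expandvars

from pydantic import BaseModel, field_validator


class ShellTaskSketch(BaseModel):
    """Simplified stand-in for ConfigTaskShell, reduced to two fields."""

    command: str | None = None
    src: str | None = None

    @field_validator("command", "src")
    @classmethod
    def expand_var(cls, value: str | None) -> str | None:
        # Expands $VAR / ${VAR} against the local environment; the commit
        # message flags this as possibly unwanted for codes defined on HPC,
        # where the remote environment differs from the local one.
        return None if value is None else expandvars(value)


def build_label(name: str, coordinates: dict) -> str:
    # New label layout: the name and each "_key_value" piece are joined with
    # a double underscore instead of a single one.
    return "__".join([name] + [f"_{key}_{value}" for key, value in coordinates.items()])


if __name__ == "__main__":
    import os

    os.environ["SCRATCH"] = "/scratch/demo"
    task = ShellTaskSketch(command="$SCRATCH/bin/icon", src="${SCRATCH}/scripts/icon.py")
    print(task.command)  # -> /scratch/demo/bin/icon
    print(build_label("icon", {"foo": 1, "bar": "single"}))  # -> icon___foo_1___bar_single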