From 727e4cf291b5227c806fade1b0a5dd7b5528d4fe Mon Sep 17 00:00:00 2001
From: "Yashasvi S. Ranawat"
Date: Wed, 8 Nov 2023 11:13:10 +0200
Subject: [PATCH] refactors RUN_PY as a file
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Makes RUN_PY a part of linting and testing, as suggested by Sebastian Björkqvist
---
 runner/runner.py           | 20 ++++++++++----------
 runner/utils/__init__.py   |  2 --
 runner/utils/run.py        | 35 +++++++++++++++++++++++++++++++++++
 runner/utils/runnerdata.py |  5 +++++
 runner/utils/utils.py      | 26 --------------------------
 5 files changed, 50 insertions(+), 38 deletions(-)
 create mode 100644 runner/utils/run.py

diff --git a/runner/runner.py b/runner/runner.py
index c862a7e..4e2273e 100644
--- a/runner/runner.py
+++ b/runner/runner.py
@@ -13,7 +13,7 @@
 from abc import ABC, abstractmethod
 from ase import db
 from ase import Atoms
-from runner.utils import Cd, RUN_PY
+from runner.utils import Cd, run
 from runner.utils.runnerdata import RunnerData
 
 logger = logging.getLogger(__name__)
@@ -27,9 +27,6 @@
 logger.addHandler(stream_handler)
 
 
-default_files = ["run.sh", "batch.slrm", "atoms.pkl"]
-
-
 class BaseRunner(ABC):
     """
     Runner runs tasks
@@ -494,6 +491,9 @@ def _write_run_data(self, atoms, tasks, files, status, log_msg):
         with open("atoms.pkl", "wb") as file_o:
             pickle.dump(atoms, file_o)
 
+        # copy run file
+        shutil.copyfile(run.__file__, "run.py")
+
         # write run scripts
         run_scripts = []
         py_run = 0
@@ -509,29 +509,29 @@ def _write_run_data(self, atoms, tasks, files, status, log_msg):
                 shell_run = "python"
                 if len(task) > 3:
                     shell_run = task[3]
-                shell_run += " run{}.py".format(py_run)
-                shell_run += " > run{}.out".format(py_run)
-                run_scripts.append(shell_run)
 
                 if len(task) > 2:
                     params = task[2]
                 else:
                     params = {}
+
                 # write params
                 try:
                     with open("params{}.json".format(py_run), "w") as file_o:
                         json.dump(params, file_o)
                 except TypeError as err:
                     status = "failed"
-                    log_msg = "{}\n Error writing params: " "{}\n".format(
+                    log_msg = "{}\n Error writing params: {}\n".format(
                         datetime.now(), err.args[0]
                     )
                     break
                 # making python executable
                 func_name = task[1]
                 func_name = func_name[:-3] if func_name.endswith(".py") else func_name
-                with open("run{}.py".format(py_run), "w") as file_o:
-                    file_o.write(RUN_PY.format(func=func_name, ind=py_run))
+
+                # add to run_scripts
+                shell_run += f" run.py {func_name} {py_run} > run{py_run}.out"
+                run_scripts.append(shell_run)
                 py_run += 1
 
         return run_scripts, status, log_msg
diff --git a/runner/utils/__init__.py b/runner/utils/__init__.py
index a3260be..8ae7ea9 100644
--- a/runner/utils/__init__.py
+++ b/runner/utils/__init__.py
@@ -2,7 +2,6 @@
 from runner.utils.utils import (
     Cd,
     json_keys2int,
-    RUN_PY,
     get_status,
     submit,
     cancel,
@@ -14,7 +13,6 @@
 
 
 __all__ = [
-    "RUN_PY",
     "Cd",
     "json_keys2int",
     "get_status",
diff --git a/runner/utils/run.py b/runner/utils/run.py
new file mode 100644
index 0000000..0f94765
--- /dev/null
+++ b/runner/utils/run.py
@@ -0,0 +1,35 @@
+"""
+python file to run python tasks
+"""
+
+import json
+import pickle
+import argparse
+
+
+def main():
+    # parse args
+    parser = argparse.ArgumentParser(description="Run function with params.")
+    parser.add_argument("func", type=str)
+    parser.add_argument("indx", type=int)
+    args = parser.parse_args()
+
+    # import module
+    func = __import__(args.func)
+
+    # open params and atoms
+    with open(f"params{args.indx}.json") as fio:
+        params = json.load(fio)
+    with open("atoms.pkl", "rb") as fio:
+        atoms = pickle.load(fio)
+
+    # run func
+    atoms = func.main(atoms, **params)
+
+    # write atoms
+    with open("atoms.pkl", "wb") as fio:
+        pickle.dump(atoms, fio)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/runner/utils/runnerdata.py b/runner/utils/runnerdata.py
index f6eeefb..5059cf6 100644
--- a/runner/utils/runnerdata.py
+++ b/runner/utils/runnerdata.py
@@ -7,6 +7,9 @@
 from runner.utils.utils import json_keys2int, get_db_connect
 
 
+default_files = ["run.sh", "batch.slrm", "atoms.pkl", "run.py", "status.txt", "job.id"]
+
+
 class RunnerData:
     """Class to handle runner data using helper function
 
@@ -372,6 +375,8 @@ def _test_files(files, log_msg=""):
             err = log_msg + "Runner: files should be a dictionary\n"
             raise RuntimeError(err)
         for filename, content in files.items():
+            if filename in default_files:
+                raise RuntimeError(log_msg + f"Runner: {filename=} in {default_files=}")
             if not isinstance(filename, str):
                 err = log_msg + "Runner: filenames should be str\n"
                 raise RuntimeError(err)
diff --git a/runner/utils/utils.py b/runner/utils/utils.py
index 830782d..dfefe4a 100644
--- a/runner/utils/utils.py
+++ b/runner/utils/utils.py
@@ -5,32 +5,6 @@
 import ase.db as db
 
 
-RUN_PY = """
-import json
-import pickle
-from ase.atoms import Atoms
-from {func} import main
-
-def json_keys2int(x):
-    # if dict key can be converted to int
-    # then convert to int
-    if isinstance(x, dict):
-        try:
-            return {{int(k):v for k,v in x.items()}}
-        except ValueError:
-            pass
-    return x
-
-with open("params{ind}.json") as f:
-    params = json.load(f, object_hook=json_keys2int)
-with open("atoms.pkl", "rb") as f:
-    atoms = pickle.load(f)
-atoms = main(atoms, **params)
-with open("atoms.pkl", "wb") as f:
-    pickle.dump(atoms, f)
-"""
-
-
 class Cd:
     """Context manager for changing the current working directory
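
Note (illustrative sketch, not part of the patch): with this change the runner copies runner/utils/run.py into the job directory and the generated run script invokes it roughly as "python run.py relax 0 > run0.out". run.py imports the module named on the command line and calls its main(atoms, **params), with atoms unpickled from atoms.pkl and params read from params0.json; the returned Atoms object is pickled back to atoms.pkl. Unlike the old inline RUN_PY template, params are now loaded with plain json.load, without the json_keys2int object hook. The sketch below shows a task module written against that contract; the module name relax and the fmax parameter are hypothetical and used only for illustration.

# relax.py: hypothetical task module, for illustration only
from ase import Atoms


def main(atoms: Atoms, fmax: float = 0.05) -> Atoms:
    """Receive the unpickled Atoms, do some work, and return an Atoms object.

    run.py pickles the returned object back to atoms.pkl; fmax stands in
    for whatever keys the runner wrote to params0.json.
    """
    # a real task would run a calculation here, e.g. a relaxation to fmax
    return atoms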