From 1298f45fd20640e79d58194c3a9f5b396ab8a7bf Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Wed, 11 Sep 2024 21:23:21 +0200 Subject: [PATCH 01/39] WIP: run notebooks on RTD --- .readthedocs.yml | 16 +++++++++++++++- docs/conf.py | 5 +++-- environment.yml | 8 ++++++++ 3 files changed, 26 insertions(+), 3 deletions(-) create mode 100644 environment.yml diff --git a/.readthedocs.yml b/.readthedocs.yml index 0fd6f0e3..b443c179 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -7,7 +7,18 @@ version: 2 build: os: "ubuntu-22.04" tools: - python: "3.12" + python: "miniconda3-3.12-24.1" # note that libmamba-solver is available since 22.1 + jobs: + post_create_environment: + - rabbitmq-server -detached + - sleep 10 + - rabbitmq-diagnostics status + - verdi presto + - verdi daemon start + - verdi status + - aiida-pseudo install sssp -x PBEsol + - verdi group list + - cat /proc/cpuinfo | grep processor | wc -l # Build documentation in the docs/ directory with Sphinx sphinx: @@ -19,6 +30,9 @@ sphinx: # See https://docs.readthedocs.io/en/latest/yaml-config.html#formats formats: [] +conda: + environment: environment.yml + # Optionally set the version of Python and requirements required to build your docs python: install: diff --git a/docs/conf.py b/docs/conf.py index 3cf19061..b2fb4ba6 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -399,8 +399,8 @@ def setup(app): app.add_css_file("css/custom.css") -# We are not installing a full aiida environment -nb_execution_mode = "off" +nb_execution_mode = "on" +nb_execution_mode = "auto" # Intersphinx configuration intersphinx_mapping = { @@ -408,6 +408,7 @@ def setup(app): "plumpy": ("https://plumpy.readthedocs.io/en/latest/", None), } + # Compile all things needed before building the docs # For instance, convert the notebook templates to actual tutorial and solution versions print( diff --git a/environment.yml b/environment.yml new file mode 100644 index 00000000..1e6d34f0 --- /dev/null +++ b/environment.yml @@ -0,0 +1,8 @@ +name: base +channels: + - conda-forge + - defaults +dependencies: + - aiida-core + - aiida-core.services + - qe From cd0c49ba21706909968eb406e8ffc64c7732cfcc Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Wed, 11 Sep 2024 21:29:33 +0200 Subject: [PATCH 02/39] add deps --- requirements.txt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/requirements.txt b/requirements.txt index be019ba1..a543d241 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,3 +5,6 @@ sphinx-copybutton~=0.5.2 sphinx-panels~=0.4.1 sphinx-tabs~=3.4.5 myst-nb~=1.1.1 +# to run notebooks +aiida-quantumespresso +aiida-pseudo From 86316430a00c6e6d41419aaa8dd40eb54431d1ce Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Wed, 11 Sep 2024 21:44:42 +0200 Subject: [PATCH 03/39] add installation of req --- .readthedocs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.readthedocs.yml b/.readthedocs.yml index b443c179..eb29b949 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -10,6 +10,7 @@ build: python: "miniconda3-3.12-24.1" # note that libmamba-solver is available since 22.1 jobs: post_create_environment: + - python -m pip install --exists-action=w --no-cache-dir -r requirements.txt - rabbitmq-server -detached - sleep 10 - rabbitmq-diagnostics status From f25fe8e39195587018c86c8720910b0fb5f0c513 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 04:07:04 +0200 Subject: [PATCH 04/39] introduce SPHINX_LINKCHECK to not execute for linkchecks, ignore querying.ipynb since it depends on the execution of 
other notebooks --- .github/workflows/ci.yml | 1 + docs/conf.py | 6 ++++-- requirements.txt | 1 + 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 75516dd8..e2d5d1e7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,3 +37,4 @@ jobs: make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going + SPHINX_LINKCHECK: on diff --git a/docs/conf.py b/docs/conf.py index b2fb4ba6..0f905222 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -399,8 +399,10 @@ def setup(app): app.add_css_file("css/custom.css") -nb_execution_mode = "on" -nb_execution_mode = "auto" +# we don't want to run the notebook during a linkcheck + +nb_execution_mode = "off" if os.getenv("SPHINX_LINKCHECK") is None else "auto" +nb_execution_excludepatterns = ["querying.ipynb"] # Intersphinx configuration intersphinx_mapping = { diff --git a/requirements.txt b/requirements.txt index a543d241..7fcb6988 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,3 +8,4 @@ myst-nb~=1.1.1 # to run notebooks aiida-quantumespresso aiida-pseudo +aiida-workgraph From 7430793975b9e013e0f48f79862b5692c9257b65 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 04:11:25 +0200 Subject: [PATCH 05/39] try to specify requirements in conda environment.yml --- .readthedocs.yml | 8 ++++---- environment.yml | 3 +++ 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index eb29b949..e650775d 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -10,7 +10,7 @@ build: python: "miniconda3-3.12-24.1" # note that libmamba-solver is available since 22.1 jobs: post_create_environment: - - python -m pip install --exists-action=w --no-cache-dir -r requirements.txt + # - python -m pip install --exists-action=w --no-cache-dir -r requirements.txt - rabbitmq-server -detached - sleep 10 - rabbitmq-diagnostics status @@ -35,6 +35,6 @@ conda: environment: environment.yml # Optionally set the version of Python and requirements required to build your docs -python: - install: - - requirements: requirements.txt +#python: +# install: +# - requirements: requirements.txt diff --git a/environment.yml b/environment.yml index 1e6d34f0..d7f7ca5a 100644 --- a/environment.yml +++ b/environment.yml @@ -6,3 +6,6 @@ dependencies: - aiida-core - aiida-core.services - qe + - pip + - pip: + - -r requirements.txt From 4c204df2a6e037f8ddd3c91dbd79d7ccab419aa3 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 04:41:44 +0200 Subject: [PATCH 06/39] integrate workgraph tutorials --- docs/index.rst | 1 + .../eos.ipynb | 361 ++++++++++ .../index.rst | 97 +++ .../writing_workflows_with_workgraph/qe.ipynb | 476 +++++++++++++ .../zero_to_hero.ipynb | 658 ++++++++++++++++++ 5 files changed, 1593 insertions(+) create mode 100644 docs/sections/writing_workflows_with_workgraph/eos.ipynb create mode 100644 docs/sections/writing_workflows_with_workgraph/index.rst create mode 100644 docs/sections/writing_workflows_with_workgraph/qe.ipynb create mode 100644 docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb diff --git a/docs/index.rst b/docs/index.rst index 8ea7495e..14016ac6 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -14,6 +14,7 @@ The material is divided in 5 units: sections/running_processes/index sections/managing_data/index sections/writing_workflows/index + sections/writing_workflows_with_workgraph/index sections/creating_plugins/index These are also accessible via the sidebar on the left. 
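A minimal sketch of how the SPHINX_LINKCHECK toggle introduced in PATCH 04 is meant to be driven, assuming the `make -C docs html` target already invoked by the CI workflow; the exact local invocations below are illustrative assumptions, only the variable name and the resulting `nb_execution_mode` values come from the conf.py hunk:

    # SPHINX_LINKCHECK unset: nb_execution_mode stays "off", notebooks are not executed
    make -C docs html

    # SPHINX_LINKCHECK set (as in ci.yml): nb_execution_mode becomes "auto", so notebooks
    # are executed, except those listed in nb_execution_excludepatterns (querying.ipynb)
    SPHINX_LINKCHECK=on make -C docs html linkcheck
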
diff --git a/docs/sections/writing_workflows_with_workgraph/eos.ipynb b/docs/sections/writing_workflows_with_workgraph/eos.ipynb new file mode 100644 index 00000000..6408030d --- /dev/null +++ b/docs/sections/writing_workflows_with_workgraph/eos.ipynb @@ -0,0 +1,361 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "7e9c7e1d", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "==================================\n", + "Equation of state (EOS) WorkGraph\n", + "==================================\n", + "\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ce0adaf6", + "metadata": {}, + "outputs": [], + "source": [ + "# To run this tutorial, you need to install aiida-workgraph and restart the daemon. Open a terminal and run:\n", + "#\n", + "# .. code-block:: console\n", + "#\n", + "# pip install aiida-workgraph[widget] aiida-quantumespresso\n", + "#\n", + "# Restart (or start) the AiiDA daemon if needed:\n", + "#\n", + "# .. code-block:: console\n", + "#\n", + "# verdi daemon restart\n", + "#\n", + "# Create the calcfunction task\n", + "# ============================\n", + "#\n", + "\n", + "\n", + "from aiida import orm\n", + "from aiida_workgraph import task\n", + "\n", + "#\n", + "# explicitly define the output socket name to match the return value of the function\n", + "@task.calcfunction(outputs=[{\"name\": \"structures\"}])\n", + "def scale_structure(structure, scales):\n", + " \"\"\"Scale the structure by the given scales.\"\"\"\n", + " atoms = structure.get_ase()\n", + " structures = {}\n", + " for i in range(len(scales)):\n", + " atoms1 = atoms.copy()\n", + " atoms1.set_cell(atoms.cell * scales[i], scale_atoms=True)\n", + " structure = orm.StructureData(ase=atoms1)\n", + " structures[f\"s_{i}\"] = structure\n", + " return {\"structures\": structures}\n", + "\n", + "\n", + "#\n", + "# Output result from context to the output socket\n", + "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"context.result\"}])\n", + "def all_scf(structures, scf_inputs):\n", + " \"\"\"Run the scf calculation for each structure.\"\"\"\n", + " from aiida_workgraph import WorkGraph\n", + " from aiida_quantumespresso.calculations.pw import PwCalculation\n", + "\n", + " wg = WorkGraph()\n", + " for key, structure in structures.items():\n", + " pw1 = wg.add_task(PwCalculation, name=f\"pw1_{key}\", structure=structure)\n", + " pw1.set(scf_inputs)\n", + " # save the output parameters to the context\n", + " pw1.set_context({\"output_parameters\": f\"result.{key}\"})\n", + " return wg\n", + "\n", + "\n", + "#\n", + "\n", + "\n", + "@task.calcfunction()\n", + "# because this is a calcfunction, and the input datas are dynamic, we need use **datas.\n", + "def eos(**datas):\n", + " \"\"\"Fit the EOS of the data.\"\"\"\n", + " from ase.eos import EquationOfState\n", + "\n", + " #\n", + " volumes = []\n", + " energies = []\n", + " for _, data in datas.items():\n", + " volumes.append(data.dict.volume)\n", + " energies.append(data.dict.energy)\n", + " unit = data.dict.energy_units\n", + " #\n", + " eos = EquationOfState(volumes, energies)\n", + " v0, e0, B = eos.fit()\n", + " eos = orm.Dict({\"unit\": unit, \"v0\": v0, \"e0\": e0, \"B\": B})\n", + " return eos" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "606b1848", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Build the workgraph\n", + "# ===================\n", + "# Three steps:\n", + "#\n", + "# - create an empty 
WorkGraph\n", + "# - add tasks: scale_structure, all_scf and eos.\n", + "# - link the output and input sockets for the tasks.\n", + "#\n", + "# Visualize the workgraph\n", + "# -----------------------\n", + "# If you are running in a jupyter notebook, you can visualize the workgraph directly.\n", + "#\n", + "\n", + "from aiida_workgraph import WorkGraph\n", + "\n", + "#\n", + "wg = WorkGraph(\"eos\")\n", + "scale_structure1 = wg.add_task(scale_structure, name=\"scale_structure1\")\n", + "all_scf1 = wg.add_task(all_scf, name=\"all_scf1\")\n", + "eos1 = wg.add_task(eos, name=\"eos1\")\n", + "wg.add_link(scale_structure1.outputs[\"structures\"], all_scf1.inputs[\"structures\"])\n", + "wg.add_link(all_scf1.outputs[\"result\"], eos1.inputs[\"datas\"])\n", + "wg.to_html()\n", + "# visualize the workgraph in jupyter-notebook\n", + "# wg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a9d5c260", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Prepare inputs and run\n", + "# ----------------------\n", + "#\n", + "\n", + "\n", + "from aiida import load_profile\n", + "from aiida.common.exceptions import NotExistent\n", + "from aiida.orm import (\n", + " Dict,\n", + " KpointsData,\n", + " StructureData,\n", + " load_code,\n", + " load_group,\n", + " InstalledCode,\n", + " load_computer,\n", + ")\n", + "from ase.build import bulk\n", + "\n", + "#\n", + "load_profile()\n", + "# create pw code\n", + "try:\n", + " pw_code = load_code(\n", + " \"qe-7.2-pw@localhost\"\n", + " ) # The computer label can also be omitted here\n", + "except NotExistent:\n", + " pw_code = InstalledCode(\n", + " computer=load_computer(\"localhost\"),\n", + " filepath_executable=\"pw.x\",\n", + " label=\"qe-7.2-pw\",\n", + " default_calc_job_plugin=\"quantumespresso.pw\",\n", + " ).store()\n", + "#\n", + "si = orm.StructureData(ase=bulk(\"Si\"))\n", + "pw_paras = Dict(\n", + " {\n", + " \"CONTROL\": {\n", + " \"calculation\": \"scf\",\n", + " },\n", + " \"SYSTEM\": {\n", + " \"ecutwfc\": 30,\n", + " \"ecutrho\": 240,\n", + " \"occupations\": \"smearing\",\n", + " \"smearing\": \"gaussian\",\n", + " \"degauss\": 0.1,\n", + " },\n", + " }\n", + ")\n", + "# Load the pseudopotential family.\n", + "pseudo_family = load_group(\"SSSP/1.3/PBEsol/efficiency\")\n", + "pseudos = pseudo_family.get_pseudos(structure=si)\n", + "#\n", + "metadata = {\n", + " \"options\": {\n", + " \"resources\": {\n", + " \"num_machines\": 1,\n", + " \"num_mpiprocs_per_machine\": 1,\n", + " },\n", + " }\n", + "}\n", + "#\n", + "kpoints = orm.KpointsData()\n", + "kpoints.set_kpoints_mesh([3, 3, 3])\n", + "pseudos = pseudo_family.get_pseudos(structure=si)\n", + "scf_inputs = {\n", + " \"code\": pw_code,\n", + " \"parameters\": pw_paras,\n", + " \"kpoints\": kpoints,\n", + " \"pseudos\": pseudos,\n", + " \"metadata\": metadata,\n", + "}\n", + "# -------------------------------------------------------\n", + "# set the input parameters for each task\n", + "wg.tasks[\"scale_structure1\"].set({\"structure\": si, \"scales\": [0.95, 1.0, 1.05]})\n", + "wg.tasks[\"all_scf1\"].set({\"scf_inputs\": scf_inputs})\n", + "print(\"Waiting for the workgraph to finish...\")\n", + "wg.submit(wait=True, timeout=300)\n", + "# one can also run the workgraph directly\n", + "# wg.run()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "be53a0ec", + "metadata": {}, + "outputs": [], + "source": [ + "# Print out the results:\n", + "#\n", + "\n", + "\n", + "data = 
wg.tasks[\"eos1\"].outputs[\"result\"].value.get_dict()\n", + "print(\"B: {B}\\nv0: {v0}\\ne0: {e0}\\nv0: {v0}\".format(**data))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4e68219c", + "metadata": {}, + "outputs": [], + "source": [ + "# Use graph builder\n", + "# =================\n", + "# The Graph Builder allow user to create a dynamic workflow based on the input value, as well as nested workflows.\n", + "#\n", + "\n", + "from aiida_workgraph import WorkGraph, task\n", + "\n", + "#\n", + "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"eos1.result\"}])\n", + "def eos_workgraph(structure=None, scales=None, scf_inputs=None):\n", + " wg = WorkGraph(\"eos\")\n", + " scale_structure1 = wg.add_task(\n", + " scale_structure, name=\"scale_structure1\", structure=structure, scales=scales\n", + " )\n", + " all_scf1 = wg.add_task(all_scf, name=\"all_scf1\", scf_inputs=scf_inputs)\n", + " eos1 = wg.add_task(eos, name=\"eos1\")\n", + " wg.add_link(scale_structure1.outputs[\"structures\"], all_scf1.inputs[\"structures\"])\n", + " wg.add_link(all_scf1.outputs[\"result\"], eos1.inputs[\"datas\"])\n", + " return wg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e4062238", + "metadata": {}, + "outputs": [], + "source": [ + "# Then we can use the `eos_workgraph` in two ways:\n", + "#\n", + "# - Direct run the function and generate the workgraph, then submit\n", + "# - Use it as a task inside another workgraph to create nested workflow.\n", + "#\n", + "# Use the graph builder directly\n", + "# ------------------------------\n", + "#\n", + "\n", + "wg = eos_workgraph(structure=si, scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs)\n", + "# One can submit the workgraph directly\n", + "# wg.submit(wait=True, timeout=300)\n", + "wg.to_html()\n", + "# visualize the workgraph in jupyter-notebook\n", + "# wg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8aad9026", + "metadata": {}, + "outputs": [], + "source": [ + "# Use it inside another workgraph\n", + "# -------------------------------\n", + "# For example, we want to combine relax with eos.\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph import WorkGraph\n", + "from copy import deepcopy\n", + "from aiida_quantumespresso.calculations.pw import PwCalculation\n", + "\n", + "#\n", + "# -------------------------------------------------------\n", + "relax_pw_paras = deepcopy(pw_paras)\n", + "relax_pw_paras[\"CONTROL\"][\"calculation\"] = \"vc-relax\"\n", + "relax_inputs = {\n", + " \"structure\": si,\n", + " \"code\": pw_code,\n", + " \"parameters\": relax_pw_paras,\n", + " \"kpoints\": kpoints,\n", + " \"pseudos\": pseudos,\n", + " \"metadata\": metadata,\n", + "}\n", + "# -------------------------------------------------------\n", + "wg = WorkGraph(\"relax_eos\")\n", + "relax_task = wg.add_task(PwCalculation, name=\"relax1\")\n", + "relax_task.set(relax_inputs)\n", + "eos_wg_task = wg.add_task(\n", + " eos_workgraph, name=\"eos1\", scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs\n", + ")\n", + "wg.add_link(relax_task.outputs[\"output_structure\"], eos_wg_task.inputs[\"structure\"])\n", + "# -------------------------------------------------------\n", + "# One can submit the workgraph directly\n", + "# wg.submit(wait=True, timeout=300)\n", + "\n", + "wg.to_html()\n", + "# visualize the workgraph in jupyter-notebook\n", + "# wg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bf39d602", + "metadata": {}, + "outputs": [], + "source": [ + "# 
Summary\n", + "# =======\n", + "# There are many ways to create the workflow using graph builder. For example, one can add the relax step inside the `eos_workgraph`, and add a `run_relax` argument to control the logic." + ] + } + ], + "metadata": { + "jupytext": { + "cell_metadata_filter": "-all", + "main_language": "python", + "notebook_metadata_filter": "-all" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/sections/writing_workflows_with_workgraph/index.rst b/docs/sections/writing_workflows_with_workgraph/index.rst new file mode 100644 index 00000000..8e7c2593 --- /dev/null +++ b/docs/sections/writing_workflows_with_workgraph/index.rst @@ -0,0 +1,97 @@ +Writing workflows +================= + +.. rst-class:: header-text + + Recently a new way to create workflows has been developed, the *workgraph*. + The workgraph should simplify the creation of the and provide a more user + friendly GUI that provides information about your workflow before + execution. In this section you will learn how to write different examples + with the workgraph. + +.. panels:: + :header: panel-header-text + :body: bg-light + :footer: bg-light border-0 + + ------ + :column: col-lg-12 + + .. link-button:: zero_to_hero + :type: ref + :text: Zero to hero + :classes: btn-light text-left stretched-link font-weight-bold + ^^^^^^^^^^^^ + + A short module on how to write the basic type of workflows in AiiDA: work functions. + The module also revises the usage of calculation functions to add simple Python functions to the provenance. + + +++++++++++++ + .. list-table:: + :widths: 50 50 + :class: footer-table + :header-rows: 0 + + * - |time| 30 min + - |aiida| :aiida-green:`Basic` + +.. panels:: + :header: panel-header-text + :body: bg-light + :footer: bg-light border-0 + + ------ + :column: col-lg-12 + + .. link-button:: qe + :type: ref + :text: Computational materials science + :classes: btn-light text-left stretched-link font-weight-bold + ^^^^^^^^^^^^ + + A step-by-step introduction to the basics of writing work chains in AiiDA. + After completing this module, you will be ready to start writing your own scientific workflows! + + +++++++++++++ + .. list-table:: + :widths: 50 50 + :class: footer-table + :header-rows: 0 + + * - |time| 60 min + - |aiida| :aiida-green:`Intermediate` + + +.. panels:: + :header: panel-header-text + :body: bg-light + :footer: bg-light border-0 + + ------ + :column: col-lg-12 + + .. link-button:: eos + :type: ref + :text: A Real-world example - Equation of state + :classes: btn-light text-left stretched-link font-weight-bold + ^^^^^^^^^^^^ + + A step-by-step introduction to the basics of writing work chains in AiiDA. + After completing this module, you will be ready to start writing your own scientific workflows! + + +++++++++++++ + .. list-table:: + :widths: 50 50 + :class: footer-table + :header-rows: 0 + + * - |time| 60 min + - |aiida| :aiida-green:`Intermediate` + +.. 
toctree:: + :hidden: + + zero_to_hero + qe + eos + diff --git a/docs/sections/writing_workflows_with_workgraph/qe.ipynb b/docs/sections/writing_workflows_with_workgraph/qe.ipynb new file mode 100644 index 00000000..eb21bb63 --- /dev/null +++ b/docs/sections/writing_workflows_with_workgraph/qe.ipynb @@ -0,0 +1,476 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "058c3541", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "================================\n", + "Computational materials science\n", + "================================\n", + "\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "068c3108", + "metadata": { + "lines_to_next_cell": 0 + }, + "outputs": [], + "source": [ + "# Introduction\n", + "# ============\n", + "# In this tutorial, you will use `AiiDA-WorkGraph` to carry out a DFT calculation using Quantum ESPRESSO.\n", + "#\n", + "# Requirements\n", + "# ------------\n", + "# To run this tutorial, you need to install `aiida-workgraph`, `aiida-quantumespresso` and `aiida-pseudo`. Open a terminal and run:\n", + "#\n", + "# .. code-block:: console\n", + "#\n", + "# pip install aiida-workgraph aiida-quantumespresso aiida-pseudo\n", + "# aiida-pseudo install sssp -x PBEsol\n", + "#\n", + "# Start the AiiDA daemon if needed:\n", + "#\n", + "# .. code-block:: console\n", + "#\n", + "# verdi daemon start\n", + "#\n", + "# Start the web server\n", + "# --------------------\n", + "#\n", + "# Open a terminal, and run:\n", + "#\n", + "# .. code-block:: console\n", + "#\n", + "# workgraph web start\n", + "#\n", + "# Then visit the page `http://127.0.0.1:8000/workgraph`, where you can view the workgraph later.\n", + "#\n", + "# Load the AiiDA profile.\n", + "#\n", + "\n", + "\n", + "from aiida import load_profile\n", + "\n", + "load_profile()\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e5762b39", + "metadata": {}, + "outputs": [], + "source": [ + "# First workflow: calculate the energy of N2 molecule\n", + "# ===================================================\n", + "# Define a workgraph\n", + "# -------------------\n", + "# aiida-quantumespresso provides a CalcJob: `PwCalculation` to run a PW calculation. we can use it directly in the WorkGraph. 
The inputs and outputs of the task is automatically generated based on the `PwCalculation` CalcJob.\n", + "#\n", + "\n", + "from aiida_quantumespresso.calculations.pw import PwCalculation\n", + "from aiida_workgraph import WorkGraph\n", + "\n", + "#\n", + "wg = WorkGraph(\"energy_n2\")\n", + "pw1 = wg.add_task(PwCalculation, name=\"pw1\")\n", + "pw1.to_html()\n", + "#\n", + "# visualize the task in jupyter-notebook\n", + "# pw1\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "00ec9e67", + "metadata": {}, + "outputs": [], + "source": [ + "# Prepare the inputs and submit the workflow\n", + "# ------------------------------------------\n", + "#\n", + "#\n", + "\n", + "from aiida import load_profile\n", + "from aiida.common.exceptions import NotExistent\n", + "from aiida.orm import (\n", + " Dict,\n", + " KpointsData,\n", + " StructureData,\n", + " load_code,\n", + " load_group,\n", + " InstalledCode,\n", + " load_computer,\n", + ")\n", + "from ase.build import molecule\n", + "\n", + "#\n", + "load_profile()\n", + "# create pw code\n", + "try:\n", + " pw_code = load_code(\n", + " \"qe-7.2-pw@localhost\"\n", + " ) # The computer label can also be omitted here\n", + "except NotExistent:\n", + " pw_code = InstalledCode(\n", + " computer=load_computer(\"localhost\"),\n", + " filepath_executable=\"pw.x\",\n", + " label=\"qe-7.2-pw\",\n", + " default_calc_job_plugin=\"quantumespresso.pw\",\n", + " ).store()\n", + "# create input structure\n", + "mol = molecule(\"N2\")\n", + "mol.center(vacuum=1.5)\n", + "mol.pbc = True\n", + "structure_n2 = StructureData(ase=mol)\n", + "paras = Dict(\n", + " {\n", + " \"CONTROL\": {\n", + " \"calculation\": \"scf\",\n", + " },\n", + " \"SYSTEM\": {\n", + " \"ecutwfc\": 30,\n", + " \"ecutrho\": 240,\n", + " \"occupations\": \"smearing\",\n", + " \"smearing\": \"gaussian\",\n", + " \"degauss\": 0.1,\n", + " },\n", + " }\n", + ")\n", + "kpoints = KpointsData()\n", + "kpoints.set_kpoints_mesh([1, 1, 1])\n", + "# Load the pseudopotential family.\n", + "pseudo_family = load_group(\"SSSP/1.3/PBEsol/efficiency\")\n", + "pseudos = pseudo_family.get_pseudos(structure=structure_n2)\n", + "#\n", + "metadata = {\n", + " \"options\": {\n", + " \"resources\": {\n", + " \"num_machines\": 1,\n", + " \"num_mpiprocs_per_machine\": 1,\n", + " },\n", + " }\n", + "}\n", + "#\n", + "# ------------------------- Set the inputs -------------------------\n", + "pw1.set(\n", + " {\n", + " \"code\": pw_code,\n", + " \"structure\": structure_n2,\n", + " \"parameters\": paras,\n", + " \"kpoints\": kpoints,\n", + " \"pseudos\": pseudos,\n", + " \"metadata\": metadata,\n", + " }\n", + ")\n", + "# ------------------------- Submit the calculation -------------------------\n", + "wg.submit(wait=True, timeout=200)\n", + "# ------------------------- Print the output -------------------------\n", + "print(\n", + " \"Energy of an un-relaxed N2 molecule: {:0.3f}\".format(\n", + " pw1.outputs[\"output_parameters\"].value.get_dict()[\"energy\"]\n", + " )\n", + ")\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "546d7f16", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Generate node graph from the AiiDA process:\n", + "#\n", + "\n", + "from aiida_workgraph.utils import generate_node_graph\n", + "\n", + "generate_node_graph(wg.pk)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbf6e16a", + "metadata": {}, + "outputs": [], + "source": [ + "# Second workflow: atomization energy of N2 
molecule\n", + "# ==================================================\n", + "#\n", + "# The atomization energy of :math:`N_2` is defined as the energy difference between the :math:`N_2` molecule and two isolated N atoms.\n", + "#\n", + "# .. code-block:: python\n", + "#\n", + "# e_atomization = 2 * e_atom - e_molecule\n", + "\n", + "# Define a calcfunction to calculate the atomization energy\n", + "# ---------------------------------------------------------\n", + "#\n", + "\n", + "from aiida_workgraph import task\n", + "\n", + "#\n", + "@task.calcfunction()\n", + "def atomization_energy(output_atom, output_mol):\n", + " from aiida.orm import Float\n", + "\n", + " e = output_atom[\"energy\"] * output_mol[\"number_of_atoms\"] - output_mol[\"energy\"]\n", + " return Float(e)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0d72cdd4", + "metadata": {}, + "outputs": [], + "source": [ + "# Create the structure of nitrogen Atom.\n", + "#\n", + "\n", + "from ase import Atoms\n", + "from aiida.orm import StructureData\n", + "\n", + "#\n", + "atoms = Atoms(\"N\")\n", + "atoms.center(vacuum=1.5)\n", + "atoms.pbc = True\n", + "structure_n = StructureData(ase=atoms)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9b29ba1b", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Create a workgraph\n", + "# ------------------\n", + "\n", + "\n", + "from aiida_workgraph import WorkGraph\n", + "from aiida.orm import load_code\n", + "\n", + "#\n", + "# load the PW code\n", + "pw_code = load_code(\"qe-7.2-pw@localhost\")\n", + "#\n", + "wg = WorkGraph(\"atomization_energy\")\n", + "#\n", + "# create the PW task\n", + "pw_n = wg.add_task(PwCalculation, name=\"pw_n\")\n", + "pw_n.set(\n", + " {\n", + " \"code\": pw_code,\n", + " \"structure\": structure_n,\n", + " \"parameters\": paras,\n", + " \"kpoints\": kpoints,\n", + " \"pseudos\": pseudos,\n", + " \"metadata\": metadata,\n", + " }\n", + ")\n", + "pw_n2 = wg.add_task(PwCalculation, name=\"pw_n2\")\n", + "pw_n2.set(\n", + " {\n", + " \"code\": pw_code,\n", + " \"structure\": structure_n2,\n", + " \"parameters\": paras,\n", + " \"kpoints\": kpoints,\n", + " \"pseudos\": pseudos,\n", + " \"metadata\": metadata,\n", + " }\n", + ")\n", + "# create the task to calculate the atomization energy\n", + "atomization = wg.add_task(atomization_energy, name=\"atomization_energy\")\n", + "wg.add_link(pw_n.outputs[\"output_parameters\"], atomization.inputs[\"output_atom\"])\n", + "wg.add_link(pw_n2.outputs[\"output_parameters\"], atomization.inputs[\"output_mol\"])\n", + "wg.to_html()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e4fb27b4", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Submit the workgraph and print the atomization energy.\n", + "#\n", + "\n", + "\n", + "wg.submit(wait=True, timeout=300)\n", + "print(\n", + " \"Atomization energy: {:0.3f} eV\".format(atomization.outputs[\"result\"].value.value)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e94e6744", + "metadata": {}, + "outputs": [], + "source": [ + "# If you start the web app (`workgraph web start`), you can visit the page http://127.0.0.1:8000/workgraph to view the tasks.\n", + "#\n", + "# You can also generate node graph from the AiiDA process:\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph.utils import generate_node_graph\n", + "\n", + "generate_node_graph(wg.pk)" + ] + }, + { + "cell_type": "code", + "execution_count": 
null, + "id": "6b6ee792", + "metadata": {}, + "outputs": [], + "source": [ + "# Use already existing workchain\n", + "# ===============================\n", + "# Can we register a task from a workchain? Can we set the a input item of a namespace? Yes, we can!\n", + "#\n", + "# In the `PwRelaxWorkChain`, one can set the relax type (`calculation` key) in the input namespace `base.pw.parameters`. Now we create a new task to update the pw parameters.\n", + "#\n", + "\n", + "from aiida_workgraph import task\n", + "\n", + "\n", + "@task.calcfunction()\n", + "def pw_parameters(paras, relax_type):\n", + " paras1 = paras.clone()\n", + " paras1[\"CONTROL\"][\"calculation\"] = relax_type\n", + " return paras1" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1653b3aa", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Now, we create the workgraph to relax the structure of N2 molecule.\n", + "#\n", + "\n", + "from aiida_quantumespresso.workflows.pw.relax import PwRelaxWorkChain\n", + "\n", + "#\n", + "wg = WorkGraph(\"test_pw_relax\")\n", + "# pw task\n", + "pw_relax1 = wg.add_task(PwRelaxWorkChain, name=\"pw_relax1\")\n", + "# Load the pseudopotential family.\n", + "pseudos = pseudo_family.get_pseudos(structure=structure_n2)\n", + "pw_relax1.set(\n", + " {\n", + " \"base\": {\n", + " \"pw\": {\"code\": pw_code, \"pseudos\": pseudos, \"metadata\": metadata},\n", + " \"kpoints\": kpoints,\n", + " },\n", + " \"structure\": structure_n2,\n", + " },\n", + ")\n", + "paras_task = wg.add_task(pw_parameters, \"parameters\", paras=paras, relax_type=\"relax\")\n", + "wg.add_link(paras_task.outputs[0], pw_relax1.inputs[\"base.pw.parameters\"])\n", + "# One can submit the workgraph directly\n", + "# wg.submit(wait=True, timeout=200)\n", + "# print(\n", + "# \"\\nEnergy of a relaxed N2 molecule: {:0.3f}\".format(\n", + "# pw_relax1.node.outputs.output_parameters.get_dict()[\"energy\"]\n", + "# )\n", + "# )" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a5c735ff", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Use `protocol` to set input parameters (Experimental)\n", + "# ====================================================\n", + "# The aiida-quantumespresso package supports setting input parameters from protocol. For example, the PwRelaxWorkChain has a `get_builder_from_protocol` method. 
In this tutorial, we will show how to use the `protocol` to set the input parameters inside the WorkGraph.\n", + "#\n", + "\n", + "from aiida_workgraph import build_task, WorkGraph\n", + "from aiida_quantumespresso.workflows.pw.relax import PwRelaxWorkChain\n", + "from ase.build import bulk\n", + "from aiida import orm\n", + "from pprint import pprint\n", + "\n", + "#\n", + "pw_code = orm.load_code(\"qe-7.2-pw@localhost\")\n", + "wg = WorkGraph(\"test_pw_relax\")\n", + "structure_si = orm.StructureData(ase=bulk(\"Si\"))\n", + "pw_relax1 = wg.add_task(PwRelaxWorkChain, name=\"pw_relax1\")\n", + "# set the inputs from the protocol\n", + "# this will call the `PwRelaxWorkChain.get_builder_from_protocol` method\n", + "# to set the inputs of the workchain\n", + "pw_relax1.set_from_protocol(\n", + " pw_code, structure_si, protocol=\"fast\", pseudo_family=\"SSSP/1.2/PBEsol/efficiency\"\n", + ")\n", + "# we can now inspect the inputs of the workchain\n", + "print(\"The inputs for the PwBaseWorkchain are:\")\n", + "print(\"-\" * 80)\n", + "pprint(pw_relax1.inputs[\"base\"].value)\n", + "print(\"\\nThe input parameters for pw are:\")\n", + "print(\"-\" * 80)\n", + "pprint(pw_relax1.inputs[\"base\"].value[\"pw\"][\"parameters\"].get_dict())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7c6bb4f5", + "metadata": {}, + "outputs": [], + "source": [ + "# One can also adjust the parameters of the `PwRelaxWorkChain` to from protocol.\n", + "#\n", + "\n", + "# For example, we want to remove the `base_final_scf` from the inputs, so that the `PwRelaxWorkChain` will not run the `base_final_scf` step.\n", + "pw_relax1.inputs[\"base_final_scf\"].value = None\n", + "# submit the workgraph\n", + "# wg.submit(wait=True, timeout=200)" + ] + } + ], + "metadata": { + "jupytext": { + "cell_metadata_filter": "-all", + "main_language": "python", + "notebook_metadata_filter": "-all" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb b/docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb new file mode 100644 index 00000000..f86153e4 --- /dev/null +++ b/docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb @@ -0,0 +1,658 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "ee2af78b", + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"\n", + "======================================\n", + "AiiDA-WorkGraph: From Zero To Hero\n", + "======================================\n", + "\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ac4c08ae", + "metadata": {}, + "outputs": [], + "source": [ + "# In this tutorial, you will learn `AiiDA-WorkGraph` to build your workflow to carry out DFT calculation. It's recommended to run this tutorial inside a Jupyter notebook.\n", + "#\n", + "# Requirements\n", + "# ===============\n", + "# To run this tutorial, you need to install `aiida-workgraph`, `aiida-quantumespresso`. Open a terminal and run:\n", + "#\n", + "# .. code-block:: console\n", + "#\n", + "# pip install aiida-workgraph[widget] aiida-quantumespresso\n", + "#\n", + "# Restart (or start) the AiiDA daemon if needed:\n", + "#\n", + "# .. 
code-block:: console\n", + "#\n", + "# verdi daemon restart\n", + "#\n", + "# Load the AiiDA profile.\n", + "#\n", + "\n", + "\n", + "from aiida import load_profile\n", + "\n", + "load_profile()\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6f014608", + "metadata": {}, + "outputs": [], + "source": [ + "# First workflow\n", + "# ===============\n", + "# Suppose we want to calculate ```(x + y) * z ``` in two steps. First, add `x` and `y`, then multiply the result with `z`.\n", + "#\n", + "# In AiiDA, we can define two `calcfunction` to do the `add` and `mutiply`:\n", + "#\n", + "\n", + "from aiida_workgraph import task\n", + "\n", + "\n", + "@task.calcfunction()\n", + "def add(x, y):\n", + " return x + y\n", + "\n", + "\n", + "@task.calcfunction()\n", + "def multiply(x, y):\n", + " return x * y" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "734ce56a", + "metadata": {}, + "outputs": [], + "source": [ + "# Create the workflow\n", + "# --------------------\n", + "# Three steps:\n", + "#\n", + "# - create a empty WorkGraph\n", + "# - add tasks: `add` and `multiply`.\n", + "# - link the output of the `add` task to the `x` input of the `multiply` task.\n", + "#\n", + "#\n", + "# In a jupyter notebook, you can visualize the workgraph directly.\n", + "#\n", + "\n", + "from aiida_workgraph import WorkGraph\n", + "\n", + "#\n", + "wg = WorkGraph(\"add_multiply_workflow\")\n", + "wg.add_task(add, name=\"add1\")\n", + "wg.add_task(multiply, name=\"multiply1\", x=wg.tasks[\"add1\"].outputs[\"result\"])\n", + "# export the workgraph to html file so that it can be visualized in a browser\n", + "wg.to_html()\n", + "# visualize the workgraph in jupyter-notebook\n", + "# wg\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d680cb0a", + "metadata": {}, + "outputs": [], + "source": [ + "# Submit the workgraph\n", + "# -------------------------\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph.utils import generate_node_graph\n", + "from aiida.orm import Int\n", + "\n", + "#\n", + "# ------------------------- Submit the calculation -------------------\n", + "wg.submit(\n", + " inputs={\"add1\": {\"x\": Int(2), \"y\": Int(3)}, \"multiply1\": {\"y\": Int(4)}}, wait=True\n", + ")\n", + "# ------------------------- Print the output -------------------------\n", + "assert wg.tasks[\"multiply1\"].outputs[\"result\"].value == 20\n", + "print(\n", + " \"\\nResult of multiply1 is {} \\n\\n\".format(\n", + " wg.tasks[\"multiply1\"].outputs[\"result\"].value\n", + " )\n", + ")\n", + "# ------------------------- Generate node graph -------------------\n", + "generate_node_graph(wg.pk)\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "24704a56", + "metadata": {}, + "outputs": [], + "source": [ + "# CalcJob and WorkChain\n", + "# =======================\n", + "# AiiDA uses `CalcJob` to run a calculation on a remote computer. AiiDA community also provides a lot of well-written `calcfunction` and `WorkChain`. One can use these AiiDA component direclty in the WorkGraph. The inputs and outputs of the task is automatically generated based on the input and output port of the AiiDA component.\n", + "#\n", + "# Here is an example of using the `ArithmeticAddCalculation` Calcjob inside the workgraph. 
Suppose we want to calculate ```(x + y) + z ``` in two steps.\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph import WorkGraph\n", + "from aiida.calculations.arithmetic.add import ArithmeticAddCalculation\n", + "\n", + "#\n", + "wg = WorkGraph(\"test_calcjob\")\n", + "new = wg.add_task\n", + "new(ArithmeticAddCalculation, name=\"add1\")\n", + "wg.add_task(ArithmeticAddCalculation, name=\"add2\", x=wg.tasks[\"add1\"].outputs[\"sum\"])\n", + "wg.to_html()\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5475f286", + "metadata": {}, + "outputs": [], + "source": [ + "# Inspect the node\n", + "# ----------------\n", + "# How do I know which input and output to connect?\n", + "#\n", + "# The inputs and outputs of a task are generated automatically based on the inputs/outputs of the AiiDA component. WorkGraph also has some built-in ports, like `_wait` and `_outputs`. One can inpsect a task's inputs and outputs.\n", + "#\n", + "# Note: special case for `calcfunction`, the default name of its output is `result`.\n", + "#\n", + "\n", + "\n", + "# visualize the task\n", + "wg.tasks[\"add1\"].to_html()\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ad64a224", + "metadata": {}, + "outputs": [], + "source": [ + "# First Real-world Workflow: atomization energy of molecule\n", + "# ==========================================================\n", + "#\n", + "# The atomization energy, $\\Delta E$, of a molecule can be expressed as:\n", + "#\n", + "# .. math::\n", + "#\n", + "# \\Delta E = n_{\\text{atom}} \\times E_{\\text{atom}} - E_{\\text{molecule}}\n", + "#\n", + "# Where:\n", + "#\n", + "# - :math:`\\Delta E` is the atomization energy of the molecule.\n", + "# - :math:`n_{\\text{atom}}` is the number of atoms.\n", + "# - :math:`E_{\\text{atom}}` is the energy of an isolated atom.\n", + "# - :math:`E_{\\text{molecule}}` is the energy of the molecule.\n", + "#\n", + "# Define a workgraph\n", + "# -------------------\n", + "# aiida-quantumespresso provides `PwCalculation` CalcJob and `PwBaseWorkChain` to run a PW calculation. we can use it directly in the WorkGraph. 
Here we use the `PwCalculation` CalcJob.\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph import WorkGraph\n", + "from aiida.engine import calcfunction\n", + "from aiida_quantumespresso.calculations.pw import PwCalculation\n", + "\n", + "#\n", + "\n", + "\n", + "@calcfunction\n", + "def atomization_energy(output_atom, output_mol):\n", + " from aiida.orm import Float\n", + "\n", + " e = output_atom[\"energy\"] * output_mol[\"number_of_atoms\"] - output_mol[\"energy\"]\n", + " return Float(e)\n", + "\n", + "\n", + "#\n", + "wg = WorkGraph(\"atomization_energy\")\n", + "pw_atom = wg.add_task(PwCalculation, name=\"pw_atom\")\n", + "pw_mol = wg.add_task(PwCalculation, name=\"pw_mol\")\n", + "# create the task to calculate the atomization energy\n", + "wg.add_task(\n", + " atomization_energy,\n", + " name=\"atomization_energy\",\n", + " output_atom=pw_atom.outputs[\"output_parameters\"],\n", + " output_mol=pw_mol.outputs[\"output_parameters\"],\n", + ")\n", + "# export the workgraph to html file so that it can be visualized in a browser\n", + "wg.to_html()\n", + "# visualize the workgraph in jupyter-notebook\n", + "# wg\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbc973db", + "metadata": {}, + "outputs": [], + "source": [ + "# Prepare the inputs and submit the workflow\n", + "# -------------------------------------------\n", + "# You need to set up the code, computer, and pseudo potential for the calculation. Please refer to the this [documentation](https://aiida-quantumespresso.readthedocs.io/en/latest/installation/index.html) for more details.\n", + "#\n", + "# You can also stip this step.\n", + "#\n", + "\n", + "from aiida import load_profile\n", + "from aiida.common.exceptions import NotExistent\n", + "from aiida.orm import (\n", + " Dict,\n", + " KpointsData,\n", + " StructureData,\n", + " load_code,\n", + " load_group,\n", + " InstalledCode,\n", + " load_computer,\n", + ")\n", + "from ase.build import molecule\n", + "from ase import Atoms\n", + "\n", + "#\n", + "load_profile()\n", + "# create pw code\n", + "try:\n", + " pw_code = load_code(\n", + " \"qe-7.2-pw@localhost\"\n", + " ) # The computer label can also be omitted here\n", + "except NotExistent:\n", + " pw_code = InstalledCode(\n", + " computer=load_computer(\"localhost\"),\n", + " filepath_executable=\"pw.x\",\n", + " label=\"qe-7.2-pw\",\n", + " default_calc_job_plugin=\"quantumespresso.pw\",\n", + " ).store()\n", + "# create structure\n", + "n_atom = Atoms(\"N\")\n", + "n_atom.center(vacuum=1.5)\n", + "n_atom.pbc = True\n", + "structure_n = StructureData(ase=n_atom)\n", + "structure_n2 = StructureData(ase=molecule(\"N2\", vacuum=1.5, pbc=True))\n", + "# create the PW task\n", + "paras = Dict(\n", + " {\n", + " \"CONTROL\": {\n", + " \"calculation\": \"scf\",\n", + " },\n", + " \"SYSTEM\": {\n", + " \"ecutwfc\": 30,\n", + " \"ecutrho\": 240,\n", + " \"occupations\": \"smearing\",\n", + " \"smearing\": \"gaussian\",\n", + " \"degauss\": 0.1,\n", + " },\n", + " }\n", + ")\n", + "kpoints = KpointsData()\n", + "kpoints.set_kpoints_mesh([1, 1, 1])\n", + "# Load the pseudopotential family.\n", + "pseudo_family = load_group(\"SSSP/1.3/PBEsol/efficiency\")\n", + "pseudos = pseudo_family.get_pseudos(structure=structure_n2)\n", + "#\n", + "metadata = {\n", + " \"options\": {\n", + " \"resources\": {\n", + " \"num_machines\": 1,\n", + " \"num_mpiprocs_per_machine\": 1,\n", + " },\n", + " }\n", + "}\n", + "#\n", + "# ------------------------- Set the inputs -------------------------\n", + 
"wg.tasks[\"pw_atom\"].set(\n", + " {\n", + " \"code\": pw_code,\n", + " \"structure\": structure_n,\n", + " \"parameters\": paras,\n", + " \"kpoints\": kpoints,\n", + " \"pseudos\": pseudos,\n", + " \"metadata\": metadata,\n", + " }\n", + ")\n", + "wg.tasks[\"pw_mol\"].set(\n", + " {\n", + " \"code\": pw_code,\n", + " \"structure\": structure_n2,\n", + " \"parameters\": paras,\n", + " \"kpoints\": kpoints,\n", + " \"pseudos\": pseudos,\n", + " \"metadata\": metadata,\n", + " }\n", + ")\n", + "# ------------------------- Submit the calculation -------------------\n", + "wg.submit(wait=True, timeout=200)\n", + "# ------------------------- Print the output -------------------------\n", + "print(\n", + " \"Energy of a N atom: {:0.3f}\".format(\n", + " wg.tasks[\"pw_atom\"].outputs[\"output_parameters\"].value.get_dict()[\"energy\"]\n", + " )\n", + ")\n", + "print(\n", + " \"Energy of an un-relaxed N2 molecule: {:0.3f}\".format(\n", + " wg.tasks[\"pw_mol\"].outputs[\"output_parameters\"].value.get_dict()[\"energy\"]\n", + " )\n", + ")\n", + "print(\n", + " \"Atomization energy: {:0.3f} eV\".format(\n", + " wg.tasks[\"atomization_energy\"].outputs[\"result\"].value.value\n", + " )\n", + ")\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6f7afec0", + "metadata": {}, + "outputs": [], + "source": [ + "# Generate node graph from the AiiDA process:\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph.utils import generate_node_graph\n", + "\n", + "generate_node_graph(wg.pk)\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c546090b", + "metadata": {}, + "outputs": [], + "source": [ + "# Advanced Topic: Dynamic Workgraph\n", + "# ==================================\n", + "#\n", + "# Graph builder\n", + "# --------------\n", + "# If we want to generate the workgraph on-the-fly, for example, if you want to use `if` to create the tasks, or repeat a calculation until it converges, you can use Graph Builder.\n", + "#\n", + "# Suppose we want to calculate:\n", + "#\n", + "# .. 
code-block:: python\n", + "#\n", + "# # step 1\n", + "# result = add(x, y)\n", + "# # step 2\n", + "# if result > 0:\n", + "# result = add(result, y)\n", + "# else:\n", + "# result = multiply(result, y)\n", + "# # step 3\n", + "# result = add(result, y)\n", + "#\n", + "\n", + "\n", + "# Create a WorkGraph which is dynamically generated based on the input\n", + "# then we output the result of from the context\n", + "from aiida_workgraph import task\n", + "\n", + "#\n", + "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"context.result\"}])\n", + "def add_multiply_if_generator(x, y):\n", + " wg = WorkGraph()\n", + " if x.value > 0:\n", + " add1 = wg.add_task(add, name=\"add1\", x=x, y=y)\n", + " # export the result of add1 to the context, so that context.result = add1.results\n", + " add1.set_context({\"result\": \"result\"})\n", + " else:\n", + " multiply1 = wg.add_task(multiply, name=\"multiply1\", x=x, y=y)\n", + " # export the result of multiply1 to the context\n", + " multiply1.set_context({\"result\": \"result\"})\n", + " return wg\n", + "\n", + "\n", + "#\n", + "wg = WorkGraph(\"if_task\")\n", + "wg.add_task(add, name=\"add1\")\n", + "wg.add_task(\n", + " add_multiply_if_generator,\n", + " name=\"add_multiply_if1\",\n", + " x=wg.tasks[\"add1\"].outputs[\"result\"],\n", + ")\n", + "wg.add_task(add, name=\"add2\", x=wg.tasks[\"add_multiply_if1\"].outputs[\"result\"])\n", + "wg.to_html()\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "52bf9f07", + "metadata": { + "lines_to_next_cell": 0 + }, + "outputs": [], + "source": [ + "# Submit the WorkGraph\n", + "\n", + "\n", + "wg.submit(\n", + " inputs={\n", + " \"add1\": {\"x\": 1, \"y\": 2},\n", + " \"add_multiply_if1\": {\"y\": 2},\n", + " \"add2\": {\"y\": 2},\n", + " },\n", + " wait=True,\n", + ")\n", + "# ------------------------- Print the output -------------------------\n", + "assert wg.tasks[\"add2\"].outputs[\"result\"].value == 7\n", + "print(\"\\nResult of add2 is {} \\n\\n\".format(wg.tasks[\"add2\"].outputs[\"result\"].value))\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5947d2e2", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Note: one can not see the detail of the `add_multiply_if1` before you running it.\n", + "#\n", + "# Second Real-world Workflow: Equation of state (EOS) WorkGraph\n", + "# =============================================================\n", + "#\n", + "# First, create the calcfunction for the job.\n", + "#\n", + "\n", + "from aiida import orm\n", + "from aiida_workgraph import task\n", + "\n", + "#\n", + "# explicitly define the output socket name to match the return value of the function\n", + "@task.calcfunction(outputs=[{\"name\": \"structures\"}])\n", + "def scale_structure(structure, scales):\n", + " \"\"\"Scale the structure by the given scales.\"\"\"\n", + " atoms = structure.get_ase()\n", + " structures = {}\n", + " for i in range(len(scales)):\n", + " atoms1 = atoms.copy()\n", + " atoms1.set_cell(atoms.cell * scales[i], scale_atoms=True)\n", + " structure = orm.StructureData(ase=atoms1)\n", + " structures[f\"s_{i}\"] = structure\n", + " return {\"structures\": structures}\n", + "\n", + "\n", + "#\n", + "# Output result from context to the output socket\n", + "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"context.result\"}])\n", + "def all_scf(structures, scf_inputs):\n", + " \"\"\"Run the scf calculation for each structure.\"\"\"\n", + " from aiida_workgraph 
import WorkGraph\n", + " from aiida_quantumespresso.calculations.pw import PwCalculation\n", + "\n", + " wg = WorkGraph()\n", + " for key, structure in structures.items():\n", + " pw1 = wg.add_task(PwCalculation, name=f\"pw1_{key}\", structure=structure)\n", + " pw1.set(scf_inputs)\n", + " # save the output parameters to the context\n", + " pw1.set_context({\"output_parameters\": f\"result.{key}\"})\n", + " return wg\n", + "\n", + "\n", + "#\n", + "\n", + "\n", + "@task.calcfunction()\n", + "# because this is a calcfunction, and the input datas are dynamic, we need use **datas.\n", + "def eos(**datas):\n", + " \"\"\"Fit the EOS of the data.\"\"\"\n", + " from ase.eos import EquationOfState\n", + "\n", + " #\n", + " volumes = []\n", + " energies = []\n", + " for _, data in datas.items():\n", + " volumes.append(data.dict.volume)\n", + " energies.append(data.dict.energy)\n", + " unit = data.dict.energy_units\n", + " #\n", + " eos = EquationOfState(volumes, energies)\n", + " v0, e0, B = eos.fit()\n", + " eos = orm.Dict({\"unit\": unit, \"v0\": v0, \"e0\": e0, \"B\": B})\n", + " return eos" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f26e396b", + "metadata": {}, + "outputs": [], + "source": [ + "# Define the WorkGraph\n", + "# ----------------------\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph import WorkGraph\n", + "\n", + "#\n", + "wg = WorkGraph(\"eos\")\n", + "scale_structure1 = wg.add_task(scale_structure, name=\"scale_structure1\")\n", + "all_scf1 = wg.add_task(\n", + " all_scf, name=\"all_scf1\", structures=scale_structure1.outputs[\"structures\"]\n", + ")\n", + "eos1 = wg.add_task(eos, name=\"eos1\", datas=all_scf1.outputs[\"result\"])\n", + "wg.to_html()\n", + "#" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7e0d9d7e", + "metadata": { + "lines_to_next_cell": 2 + }, + "outputs": [], + "source": [ + "# Combine with a relax task\n", + "# --------------------------\n", + "#\n", + "\n", + "\n", + "from aiida_workgraph import WorkGraph, task\n", + "from aiida_quantumespresso.calculations.pw import PwCalculation\n", + "\n", + "#\n", + "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"eos1.result\"}])\n", + "def eos_workgraph(structure=None, scales=None, scf_inputs=None):\n", + " wg = WorkGraph(\"eos\")\n", + " scale_structure1 = wg.add_task(\n", + " scale_structure, name=\"scale_structure1\", structure=structure, scales=scales\n", + " )\n", + " all_scf1 = wg.add_task(all_scf, name=\"all_scf1\", scf_inputs=scf_inputs)\n", + " eos1 = wg.add_task(eos, name=\"eos1\")\n", + " wg.add_link(scale_structure1.outputs[\"structures\"], all_scf1.inputs[\"structures\"])\n", + " wg.add_link(all_scf1.outputs[\"result\"], eos1.inputs[\"datas\"])\n", + " return wg\n", + "\n", + "\n", + "#\n", + "\n", + "# -------------------------------------------------------\n", + "wg = WorkGraph(\"relax_eos\")\n", + "relax_task = wg.add_task(PwCalculation, name=\"relax1\")\n", + "eos_wg_task = wg.add_task(\n", + " eos_workgraph, name=\"eos1\", structure=relax_task.outputs[\"output_structure\"]\n", + ")\n", + "wg.to_html()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "105f4dca", + "metadata": {}, + "outputs": [], + "source": [ + "# Useful tool: Web GUI\n", + "# =====================\n", + "# Open a terminal, and run:\n", + "#\n", + "# .. 
code-block:: console\n", + "#\n", + "# workgraph web start\n", + "#\n", + "# Then visit the page `http://127.0.0.1:8000/workgraph`, where you can view all the workgraphs.\n", + "#\n", + "# What's Next\n", + "# ===========\n", + "#\n", + "# +-----------------------------------------------------+-----------------------------------------------------------------------------+\n", + "# | `Concepts <../../concept/index.html>`_ | A brief introduction of WorkGraph’s main concepts. |\n", + "# +-----------------------------------------------------+-----------------------------------------------------------------------------+\n", + "# | `HowTo <../../howto/index.html>`_ | Advanced topics, e.g., flow control using `if`, `while`, and `context`. |\n", + "# +-----------------------------------------------------+-----------------------------------------------------------------------------+\n", + "#" + ] + } + ], + "metadata": { + "jupytext": { + "cell_metadata_filter": "-all", + "main_language": "python", + "notebook_metadata_filter": "-all" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From db2e36a8a863f71c5657a71f944d731748fb7aa0 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 04:47:24 +0200 Subject: [PATCH 07/39] debug linkcheck --- .github/workflows/ci.yml | 1 + docs/conf.py | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e2d5d1e7..132299b8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -34,6 +34,7 @@ jobs: pip install -r requirements.txt - name: "Build HTML docs" run: | + echo "SPHINX_LINKCHECK $SPHINX_LINKCHECK" make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going diff --git a/docs/conf.py b/docs/conf.py index 0f905222..2a948e15 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -402,6 +402,7 @@ def setup(app): # we don't want to run the notebook during a linkcheck nb_execution_mode = "off" if os.getenv("SPHINX_LINKCHECK") is None else "auto" +print("nb_execution_mode:", nb_execution_mode) nb_execution_excludepatterns = ["querying.ipynb"] # Intersphinx configuration From ddbc440a3ad007a6120fc9bee29554aac4b8f0b8 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 09:23:29 +0200 Subject: [PATCH 08/39] initialize sphinx-gallery --- docs/conf.py | 78 ++- .../autogen/GALLERY_HEADER.rst | 3 + .../autogen/eos.py | 283 ++++++++ .../autogen/qe.py | 357 ++++++++++ .../autogen/zero_to_hero.py | 527 ++++++++++++++ .../eos.ipynb | 361 ---------- .../index.rst | 96 +-- .../writing_workflows_with_workgraph/qe.ipynb | 476 ------------- .../zero_to_hero.ipynb | 658 ------------------ requirements.txt | 3 +- 10 files changed, 1250 insertions(+), 1592 deletions(-) create mode 100644 docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst create mode 100644 docs/gallery/sections/writing_workflows_with_workgraph/autogen/eos.py create mode 100644 docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py create mode 100644 docs/gallery/sections/writing_workflows_with_workgraph/autogen/zero_to_hero.py delete mode 100644 docs/sections/writing_workflows_with_workgraph/eos.ipynb delete mode 100644 docs/sections/writing_workflows_with_workgraph/qe.ipynb delete mode 100644 docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb diff --git a/docs/conf.py b/docs/conf.py index 2a948e15..9f77b3bb 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -42,6 +42,7 @@ "sphinx_copybutton", "sphinx_panels", "sphinx_tabs.tabs", + 
"sphinx_gallery.gen_gallery" ] myst_enable_extensions = [ @@ -399,11 +400,8 @@ def setup(app): app.add_css_file("css/custom.css") -# we don't want to run the notebook during a linkcheck - -nb_execution_mode = "off" if os.getenv("SPHINX_LINKCHECK") is None else "auto" -print("nb_execution_mode:", nb_execution_mode) -nb_execution_excludepatterns = ["querying.ipynb"] +# we don't want to run the notebook +nb_execution_mode = "off" # Intersphinx configuration intersphinx_mapping = { @@ -411,6 +409,30 @@ def setup(app): "plumpy": ("https://plumpy.readthedocs.io/en/latest/", None), } +gallery_src_relative_dir = ( + "gallery" # relative path of the gallery src wrt. sphinx src +) +sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"] +# we mimik the structure in the sphinx src directory in the gallery src directory + +# path of python scripts that should be executed +gallery_src_dirs = [ + os.path.join(gallery_src_relative_dir, autogen_dir) + for autogen_dir in sphinx_src_autogen_dirs +] +sphinx_gallery_conf = { + "filename_pattern": "/*", + "examples_dirs": gallery_src_dirs, # in sphinx-gallery doc referred as gallery source + "gallery_dirs": sphinx_src_autogen_dirs, # path to where to gallery puts generated files +} + +# ignore in the autogenerated ipynb files to surpress warning +exclude_patterns.extend( + [ + os.path.join(sphinx_src_autogen_dir, "*ipynb") + for sphinx_src_autogen_dir in sphinx_src_autogen_dirs + ] +) # Compile all things needed before building the docs # For instance, convert the notebook templates to actual tutorial and solution versions @@ -420,3 +442,49 @@ def setup(app): universal_newlines=True, ) ) + +def copy_html_files(app, exception): + """ + Copy all .html files from source to build directory, maintaining the directory structure. 
+ """ + print("Copying HTML files to build directory") + copy_print_info = "Copying HTML files to build directory" + print() + print(copy_print_info) + print(len(copy_print_info) * "=") + if exception is not None: # Only copy files if the build succeeded + print( + "Build failed, but we still try to copy the HTML files to the build directory" + ) + try: + src_path = Path(app.builder.srcdir) + build_path = Path(app.builder.outdir) + + copy_print_info = f"Copying html files from sphinx src directory {src_path}" + print() + print(copy_print_info) + print(len(copy_print_info) * "-") + for html_file in src_path.rglob("*.html"): + relative_path = html_file.relative_to(src_path) + destination_file = build_path / relative_path + destination_file.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(html_file, destination_file) + print(f"Copy {html_file} to {destination_file}") + + gallery_src_path = Path(app.builder.srcdir / Path(gallery_src_relative_dir)) + + copy_print_info = ( + f"Copying html files from gallery src directory {gallery_src_path} to build" + ) + print() + print(copy_print_info) + print(len(copy_print_info) * "-") + for html_file in gallery_src_path.rglob("*.html"): + relative_path = html_file.relative_to(gallery_src_path) + destination_file = build_path / relative_path + destination_file.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(html_file, destination_file) + print(f"Copy {html_file} to {destination_file}") + except Exception as e: + print(f"Failed to copy HTML files: {e}") + diff --git a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst new file mode 100644 index 00000000..ef04f7d6 --- /dev/null +++ b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst @@ -0,0 +1,3 @@ +================================ +Running workflows with WorkGraph +================================ diff --git a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/eos.py b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/eos.py new file mode 100644 index 00000000..8d88a42a --- /dev/null +++ b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/eos.py @@ -0,0 +1,283 @@ +""" +================================== +Equation of state (EOS) WorkGraph +================================== + +""" + +# %% +# To run this tutorial, you need to install aiida-workgraph and restart the daemon. Open a terminal and run: +# +# .. code-block:: console +# +# pip install aiida-workgraph[widget] aiida-quantumespresso +# +# Restart (or start) the AiiDA daemon if needed: +# +# .. 
code-block:: console +# +# verdi daemon restart +# +# Create the calcfunction task +# ============================ +# + + +from aiida import orm +from aiida_workgraph import task + +# +# explicitly define the output socket name to match the return value of the function +@task.calcfunction(outputs=[{"name": "structures"}]) +def scale_structure(structure, scales): + """Scale the structure by the given scales.""" + atoms = structure.get_ase() + structures = {} + for i in range(len(scales)): + atoms1 = atoms.copy() + atoms1.set_cell(atoms.cell * scales[i], scale_atoms=True) + structure = orm.StructureData(ase=atoms1) + structures[f"s_{i}"] = structure + return {"structures": structures} + + +# +# Output result from context to the output socket +@task.graph_builder(outputs=[{"name": "result", "from": "context.result"}]) +def all_scf(structures, scf_inputs): + """Run the scf calculation for each structure.""" + from aiida_workgraph import WorkGraph + from aiida_quantumespresso.calculations.pw import PwCalculation + + wg = WorkGraph() + for key, structure in structures.items(): + pw1 = wg.add_task(PwCalculation, name=f"pw1_{key}", structure=structure) + pw1.set(scf_inputs) + # save the output parameters to the context + pw1.set_context({"output_parameters": f"result.{key}"}) + return wg + + +# + + +@task.calcfunction() +# because this is a calcfunction, and the input datas are dynamic, we need use **datas. +def eos(**datas): + """Fit the EOS of the data.""" + from ase.eos import EquationOfState + + # + volumes = [] + energies = [] + for _, data in datas.items(): + volumes.append(data.dict.volume) + energies.append(data.dict.energy) + unit = data.dict.energy_units + # + eos = EquationOfState(volumes, energies) + v0, e0, B = eos.fit() + eos = orm.Dict({"unit": unit, "v0": v0, "e0": e0, "B": B}) + return eos + + +# %% +# Build the workgraph +# =================== +# Three steps: +# +# - create an empty WorkGraph +# - add tasks: scale_structure, all_scf and eos. +# - link the output and input sockets for the tasks. +# +# Visualize the workgraph +# ----------------------- +# If you are running in a jupyter notebook, you can visualize the workgraph directly. 
+# + +from aiida_workgraph import WorkGraph + +# +wg = WorkGraph("eos") +scale_structure1 = wg.add_task(scale_structure, name="scale_structure1") +all_scf1 = wg.add_task(all_scf, name="all_scf1") +eos1 = wg.add_task(eos, name="eos1") +wg.add_link(scale_structure1.outputs["structures"], all_scf1.inputs["structures"]) +wg.add_link(all_scf1.outputs["result"], eos1.inputs["datas"]) +wg.to_html() +# visualize the workgraph in jupyter-notebook +# wg + + +# %% +# Prepare inputs and run +# ---------------------- +# + + +from aiida import load_profile +from aiida.common.exceptions import NotExistent +from aiida.orm import ( + Dict, + KpointsData, + StructureData, + load_code, + load_group, + InstalledCode, + load_computer, +) +from ase.build import bulk + +# +load_profile() +# create pw code +try: + pw_code = load_code( + "qe-7.2-pw@localhost" + ) # The computer label can also be omitted here +except NotExistent: + pw_code = InstalledCode( + computer=load_computer("localhost"), + filepath_executable="pw.x", + label="qe-7.2-pw", + default_calc_job_plugin="quantumespresso.pw", + ).store() +# +si = orm.StructureData(ase=bulk("Si")) +pw_paras = Dict( + { + "CONTROL": { + "calculation": "scf", + }, + "SYSTEM": { + "ecutwfc": 30, + "ecutrho": 240, + "occupations": "smearing", + "smearing": "gaussian", + "degauss": 0.1, + }, + } +) +# Load the pseudopotential family. +pseudo_family = load_group("SSSP/1.3/PBEsol/efficiency") +pseudos = pseudo_family.get_pseudos(structure=si) +# +metadata = { + "options": { + "resources": { + "num_machines": 1, + "num_mpiprocs_per_machine": 1, + }, + } +} +# +kpoints = orm.KpointsData() +kpoints.set_kpoints_mesh([3, 3, 3]) +pseudos = pseudo_family.get_pseudos(structure=si) +scf_inputs = { + "code": pw_code, + "parameters": pw_paras, + "kpoints": kpoints, + "pseudos": pseudos, + "metadata": metadata, +} +# ------------------------------------------------------- +# set the input parameters for each task +wg.tasks["scale_structure1"].set({"structure": si, "scales": [0.95, 1.0, 1.05]}) +wg.tasks["all_scf1"].set({"scf_inputs": scf_inputs}) +print("Waiting for the workgraph to finish...") +wg.submit(wait=True, timeout=300) +# one can also run the workgraph directly +# wg.run() + + +# %% +# Print out the results: +# + + +data = wg.tasks["eos1"].outputs["result"].value.get_dict() +print("B: {B}\nv0: {v0}\ne0: {e0}\nv0: {v0}".format(**data)) + +# %% +# Use graph builder +# ================= +# The Graph Builder allow user to create a dynamic workflow based on the input value, as well as nested workflows. +# + +from aiida_workgraph import WorkGraph, task + +# +@task.graph_builder(outputs=[{"name": "result", "from": "eos1.result"}]) +def eos_workgraph(structure=None, scales=None, scf_inputs=None): + wg = WorkGraph("eos") + scale_structure1 = wg.add_task( + scale_structure, name="scale_structure1", structure=structure, scales=scales + ) + all_scf1 = wg.add_task(all_scf, name="all_scf1", scf_inputs=scf_inputs) + eos1 = wg.add_task(eos, name="eos1") + wg.add_link(scale_structure1.outputs["structures"], all_scf1.inputs["structures"]) + wg.add_link(all_scf1.outputs["result"], eos1.inputs["datas"]) + return wg + + +# %% +# Then we can use the `eos_workgraph` in two ways: +# +# - Direct run the function and generate the workgraph, then submit +# - Use it as a task inside another workgraph to create nested workflow. 
+# +# Use the graph builder directly +# ------------------------------ +# + +wg = eos_workgraph(structure=si, scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs) +# One can submit the workgraph directly +# wg.submit(wait=True, timeout=300) +wg.to_html() +# visualize the workgraph in jupyter-notebook +# wg + +# %% +# Use it inside another workgraph +# ------------------------------- +# For example, we want to combine relax with eos. +# + + +from aiida_workgraph import WorkGraph +from copy import deepcopy +from aiida_quantumespresso.calculations.pw import PwCalculation + +# +# ------------------------------------------------------- +relax_pw_paras = deepcopy(pw_paras) +relax_pw_paras["CONTROL"]["calculation"] = "vc-relax" +relax_inputs = { + "structure": si, + "code": pw_code, + "parameters": relax_pw_paras, + "kpoints": kpoints, + "pseudos": pseudos, + "metadata": metadata, +} +# ------------------------------------------------------- +wg = WorkGraph("relax_eos") +relax_task = wg.add_task(PwCalculation, name="relax1") +relax_task.set(relax_inputs) +eos_wg_task = wg.add_task( + eos_workgraph, name="eos1", scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs +) +wg.add_link(relax_task.outputs["output_structure"], eos_wg_task.inputs["structure"]) +# ------------------------------------------------------- +# One can submit the workgraph directly +# wg.submit(wait=True, timeout=300) + +wg.to_html() +# visualize the workgraph in jupyter-notebook +# wg + +# %% +# Summary +# ======= +# There are many ways to create the workflow using graph builder. For example, one can add the relax step inside the `eos_workgraph`, and add a `run_relax` argument to control the logic. diff --git a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py new file mode 100644 index 00000000..aaab06bb --- /dev/null +++ b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py @@ -0,0 +1,357 @@ +""" +================================ +Computational materials science +================================ + +""" + +# %% +# Introduction +# ============ +# In this tutorial, you will use `AiiDA-WorkGraph` to carry out a DFT calculation using Quantum ESPRESSO. +# +# Requirements +# ------------ +# To run this tutorial, you need to install `aiida-workgraph`, `aiida-quantumespresso` and `aiida-pseudo`. Open a terminal and run: +# +# .. code-block:: console +# +# pip install aiida-workgraph aiida-quantumespresso aiida-pseudo +# aiida-pseudo install sssp -x PBEsol +# +# Start the AiiDA daemon if needed: +# +# .. code-block:: console +# +# verdi daemon start +# +# Start the web server +# -------------------- +# +# Open a terminal, and run: +# +# .. code-block:: console +# +# workgraph web start +# +# Then visit the page `http://127.0.0.1:8000/workgraph`, where you can view the workgraph later. +# +# Load the AiiDA profile. +# + + +from aiida import load_profile + +load_profile() +# +# %% +# First workflow: calculate the energy of N2 molecule +# =================================================== +# Define a workgraph +# ------------------- +# aiida-quantumespresso provides a CalcJob: `PwCalculation` to run a PW calculation. we can use it directly in the WorkGraph. The inputs and outputs of the task is automatically generated based on the `PwCalculation` CalcJob. 
+# + +from aiida_quantumespresso.calculations.pw import PwCalculation +from aiida_workgraph import WorkGraph + +# +wg = WorkGraph("energy_n2") +pw1 = wg.add_task(PwCalculation, name="pw1") +pw1.to_html() +# +# visualize the task in jupyter-notebook +# pw1 +# + +# %% +# Prepare the inputs and submit the workflow +# ------------------------------------------ +# +# + +from aiida import load_profile +from aiida.common.exceptions import NotExistent +from aiida.orm import ( + Dict, + KpointsData, + StructureData, + load_code, + load_group, + InstalledCode, + load_computer, +) +from ase.build import molecule + +# +load_profile() +# create pw code +try: + pw_code = load_code( + "qe-7.2-pw@localhost" + ) # The computer label can also be omitted here +except NotExistent: + pw_code = InstalledCode( + computer=load_computer("localhost"), + filepath_executable="pw.x", + label="qe-7.2-pw", + default_calc_job_plugin="quantumespresso.pw", + ).store() +# create input structure +mol = molecule("N2") +mol.center(vacuum=1.5) +mol.pbc = True +structure_n2 = StructureData(ase=mol) +paras = Dict( + { + "CONTROL": { + "calculation": "scf", + }, + "SYSTEM": { + "ecutwfc": 30, + "ecutrho": 240, + "occupations": "smearing", + "smearing": "gaussian", + "degauss": 0.1, + }, + } +) +kpoints = KpointsData() +kpoints.set_kpoints_mesh([1, 1, 1]) +# Load the pseudopotential family. +pseudo_family = load_group("SSSP/1.3/PBEsol/efficiency") +pseudos = pseudo_family.get_pseudos(structure=structure_n2) +# +metadata = { + "options": { + "resources": { + "num_machines": 1, + "num_mpiprocs_per_machine": 1, + }, + } +} +# +# ------------------------- Set the inputs ------------------------- +pw1.set( + { + "code": pw_code, + "structure": structure_n2, + "parameters": paras, + "kpoints": kpoints, + "pseudos": pseudos, + "metadata": metadata, + } +) +# ------------------------- Submit the calculation ------------------------- +wg.submit(wait=True, timeout=200) +# ------------------------- Print the output ------------------------- +print( + "Energy of an un-relaxed N2 molecule: {:0.3f}".format( + pw1.outputs["output_parameters"].value.get_dict()["energy"] + ) +) +# + +# %% +# Generate node graph from the AiiDA process: +# + +from aiida_workgraph.utils import generate_node_graph + +generate_node_graph(wg.pk) + + +# %% +# Second workflow: atomization energy of N2 molecule +# ================================================== +# +# The atomization energy of :math:`N_2` is defined as the energy difference between the :math:`N_2` molecule and two isolated N atoms. +# +# .. code-block:: python +# +# e_atomization = 2 * e_atom - e_molecule + +# Define a calcfunction to calculate the atomization energy +# --------------------------------------------------------- +# + +from aiida_workgraph import task + +# +@task.calcfunction() +def atomization_energy(output_atom, output_mol): + from aiida.orm import Float + + e = output_atom["energy"] * output_mol["number_of_atoms"] - output_mol["energy"] + return Float(e) + + +# %% +# Create the structure of nitrogen Atom. 
+# + +from ase import Atoms +from aiida.orm import StructureData + +# +atoms = Atoms("N") +atoms.center(vacuum=1.5) +atoms.pbc = True +structure_n = StructureData(ase=atoms) + +# %% +# Create a workgraph +# ------------------ + + +from aiida_workgraph import WorkGraph +from aiida.orm import load_code + +# +# load the PW code +pw_code = load_code("qe-7.2-pw@localhost") +# +wg = WorkGraph("atomization_energy") +# +# create the PW task +pw_n = wg.add_task(PwCalculation, name="pw_n") +pw_n.set( + { + "code": pw_code, + "structure": structure_n, + "parameters": paras, + "kpoints": kpoints, + "pseudos": pseudos, + "metadata": metadata, + } +) +pw_n2 = wg.add_task(PwCalculation, name="pw_n2") +pw_n2.set( + { + "code": pw_code, + "structure": structure_n2, + "parameters": paras, + "kpoints": kpoints, + "pseudos": pseudos, + "metadata": metadata, + } +) +# create the task to calculate the atomization energy +atomization = wg.add_task(atomization_energy, name="atomization_energy") +wg.add_link(pw_n.outputs["output_parameters"], atomization.inputs["output_atom"]) +wg.add_link(pw_n2.outputs["output_parameters"], atomization.inputs["output_mol"]) +wg.to_html() + + +# %% +# Submit the workgraph and print the atomization energy. +# + + +wg.submit(wait=True, timeout=300) +print( + "Atomization energy: {:0.3f} eV".format(atomization.outputs["result"].value.value) +) + + +# %% +# If you start the web app (`workgraph web start`), you can visit the page http://127.0.0.1:8000/workgraph to view the tasks. +# +# You can also generate node graph from the AiiDA process: +# + + +from aiida_workgraph.utils import generate_node_graph + +generate_node_graph(wg.pk) + +# %% +# Use already existing workchain +# =============================== +# Can we register a task from a workchain? Can we set the a input item of a namespace? Yes, we can! +# +# In the `PwRelaxWorkChain`, one can set the relax type (`calculation` key) in the input namespace `base.pw.parameters`. Now we create a new task to update the pw parameters. +# + +from aiida_workgraph import task + + +@task.calcfunction() +def pw_parameters(paras, relax_type): + paras1 = paras.clone() + paras1["CONTROL"]["calculation"] = relax_type + return paras1 + + +# %% +# Now, we create the workgraph to relax the structure of N2 molecule. +# + +from aiida_quantumespresso.workflows.pw.relax import PwRelaxWorkChain + +# +wg = WorkGraph("test_pw_relax") +# pw task +pw_relax1 = wg.add_task(PwRelaxWorkChain, name="pw_relax1") +# Load the pseudopotential family. +pseudos = pseudo_family.get_pseudos(structure=structure_n2) +pw_relax1.set( + { + "base": { + "pw": {"code": pw_code, "pseudos": pseudos, "metadata": metadata}, + "kpoints": kpoints, + }, + "structure": structure_n2, + }, +) +paras_task = wg.add_task(pw_parameters, "parameters", paras=paras, relax_type="relax") +wg.add_link(paras_task.outputs[0], pw_relax1.inputs["base.pw.parameters"]) +# One can submit the workgraph directly +# wg.submit(wait=True, timeout=200) +# print( +# "\nEnergy of a relaxed N2 molecule: {:0.3f}".format( +# pw_relax1.node.outputs.output_parameters.get_dict()["energy"] +# ) +# ) + + +# %% +# Use `protocol` to set input parameters (Experimental) +# ==================================================== +# The aiida-quantumespresso package supports setting input parameters from protocol. For example, the PwRelaxWorkChain has a `get_builder_from_protocol` method. In this tutorial, we will show how to use the `protocol` to set the input parameters inside the WorkGraph. 
+# + +from aiida_workgraph import build_task, WorkGraph +from aiida_quantumespresso.workflows.pw.relax import PwRelaxWorkChain +from ase.build import bulk +from aiida import orm +from pprint import pprint + +# +pw_code = orm.load_code("qe-7.2-pw@localhost") +wg = WorkGraph("test_pw_relax") +structure_si = orm.StructureData(ase=bulk("Si")) +pw_relax1 = wg.add_task(PwRelaxWorkChain, name="pw_relax1") +# set the inputs from the protocol +# this will call the `PwRelaxWorkChain.get_builder_from_protocol` method +# to set the inputs of the workchain +pw_relax1.set_from_protocol( + pw_code, structure_si, protocol="fast", pseudo_family="SSSP/1.2/PBEsol/efficiency" +) +# we can now inspect the inputs of the workchain +print("The inputs for the PwBaseWorkchain are:") +print("-" * 80) +pprint(pw_relax1.inputs["base"].value) +print("\nThe input parameters for pw are:") +print("-" * 80) +pprint(pw_relax1.inputs["base"].value["pw"]["parameters"].get_dict()) + + +# %% +# One can also adjust the parameters of the `PwRelaxWorkChain` to from protocol. +# + +# For example, we want to remove the `base_final_scf` from the inputs, so that the `PwRelaxWorkChain` will not run the `base_final_scf` step. +pw_relax1.inputs["base_final_scf"].value = None +# submit the workgraph +# wg.submit(wait=True, timeout=200) diff --git a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/zero_to_hero.py b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/zero_to_hero.py new file mode 100644 index 00000000..664ca9a2 --- /dev/null +++ b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/zero_to_hero.py @@ -0,0 +1,527 @@ +""" +====================================== +AiiDA-WorkGraph: From Zero To Hero +====================================== + +""" + +# %% +# In this tutorial, you will learn `AiiDA-WorkGraph` to build your workflow to carry out DFT calculation. It's recommended to run this tutorial inside a Jupyter notebook. +# +# Requirements +# =============== +# To run this tutorial, you need to install `aiida-workgraph`, `aiida-quantumespresso`. Open a terminal and run: +# +# .. code-block:: console +# +# pip install aiida-workgraph[widget] aiida-quantumespresso +# +# Restart (or start) the AiiDA daemon if needed: +# +# .. code-block:: console +# +# verdi daemon restart +# +# Load the AiiDA profile. +# + + +from aiida import load_profile + +load_profile() +# + +# %% +# First workflow +# =============== +# Suppose we want to calculate ```(x + y) * z ``` in two steps. First, add `x` and `y`, then multiply the result with `z`. +# +# In AiiDA, we can define two `calcfunction` to do the `add` and `mutiply`: +# + +from aiida_workgraph import task + + +@task.calcfunction() +def add(x, y): + return x + y + + +@task.calcfunction() +def multiply(x, y): + return x * y + + +# %% +# Create the workflow +# -------------------- +# Three steps: +# +# - create a empty WorkGraph +# - add tasks: `add` and `multiply`. +# - link the output of the `add` task to the `x` input of the `multiply` task. +# +# +# In a jupyter notebook, you can visualize the workgraph directly. 
+# + +from aiida_workgraph import WorkGraph + +# +wg = WorkGraph("add_multiply_workflow") +wg.add_task(add, name="add1") +wg.add_task(multiply, name="multiply1", x=wg.tasks["add1"].outputs["result"]) +# export the workgraph to html file so that it can be visualized in a browser +wg.to_html() +# visualize the workgraph in jupyter-notebook +# wg +# + +# %% +# Submit the workgraph +# ------------------------- +# + + +from aiida_workgraph.utils import generate_node_graph +from aiida.orm import Int + +# +# ------------------------- Submit the calculation ------------------- +wg.submit( + inputs={"add1": {"x": Int(2), "y": Int(3)}, "multiply1": {"y": Int(4)}}, wait=True +) +# ------------------------- Print the output ------------------------- +assert wg.tasks["multiply1"].outputs["result"].value == 20 +print( + "\nResult of multiply1 is {} \n\n".format( + wg.tasks["multiply1"].outputs["result"].value + ) +) +# ------------------------- Generate node graph ------------------- +generate_node_graph(wg.pk) +# + +# %% +# CalcJob and WorkChain +# ======================= +# AiiDA uses `CalcJob` to run a calculation on a remote computer. AiiDA community also provides a lot of well-written `calcfunction` and `WorkChain`. One can use these AiiDA component direclty in the WorkGraph. The inputs and outputs of the task is automatically generated based on the input and output port of the AiiDA component. +# +# Here is an example of using the `ArithmeticAddCalculation` Calcjob inside the workgraph. Suppose we want to calculate ```(x + y) + z ``` in two steps. +# + + +from aiida_workgraph import WorkGraph +from aiida.calculations.arithmetic.add import ArithmeticAddCalculation + +# +wg = WorkGraph("test_calcjob") +new = wg.add_task +new(ArithmeticAddCalculation, name="add1") +wg.add_task(ArithmeticAddCalculation, name="add2", x=wg.tasks["add1"].outputs["sum"]) +wg.to_html() +# + +# %% +# Inspect the node +# ---------------- +# How do I know which input and output to connect? +# +# The inputs and outputs of a task are generated automatically based on the inputs/outputs of the AiiDA component. WorkGraph also has some built-in ports, like `_wait` and `_outputs`. One can inpsect a task's inputs and outputs. +# +# Note: special case for `calcfunction`, the default name of its output is `result`. +# + + +# visualize the task +wg.tasks["add1"].to_html() +# + +# %% +# First Real-world Workflow: atomization energy of molecule +# ========================================================== +# +# The atomization energy, $\Delta E$, of a molecule can be expressed as: +# +# .. math:: +# +# \Delta E = n_{\text{atom}} \times E_{\text{atom}} - E_{\text{molecule}} +# +# Where: +# +# - :math:`\Delta E` is the atomization energy of the molecule. +# - :math:`n_{\text{atom}}` is the number of atoms. +# - :math:`E_{\text{atom}}` is the energy of an isolated atom. +# - :math:`E_{\text{molecule}}` is the energy of the molecule. +# +# Define a workgraph +# ------------------- +# aiida-quantumespresso provides `PwCalculation` CalcJob and `PwBaseWorkChain` to run a PW calculation. we can use it directly in the WorkGraph. Here we use the `PwCalculation` CalcJob. 
+# + + +from aiida_workgraph import WorkGraph +from aiida.engine import calcfunction +from aiida_quantumespresso.calculations.pw import PwCalculation + +# + + +@calcfunction +def atomization_energy(output_atom, output_mol): + from aiida.orm import Float + + e = output_atom["energy"] * output_mol["number_of_atoms"] - output_mol["energy"] + return Float(e) + + +# +wg = WorkGraph("atomization_energy") +pw_atom = wg.add_task(PwCalculation, name="pw_atom") +pw_mol = wg.add_task(PwCalculation, name="pw_mol") +# create the task to calculate the atomization energy +wg.add_task( + atomization_energy, + name="atomization_energy", + output_atom=pw_atom.outputs["output_parameters"], + output_mol=pw_mol.outputs["output_parameters"], +) +# export the workgraph to html file so that it can be visualized in a browser +wg.to_html() +# visualize the workgraph in jupyter-notebook +# wg +# + +# %% +# Prepare the inputs and submit the workflow +# ------------------------------------------- +# You need to set up the code, computer, and pseudo potential for the calculation. Please refer to the this [documentation](https://aiida-quantumespresso.readthedocs.io/en/latest/installation/index.html) for more details. +# +# You can also stip this step. +# + +from aiida import load_profile +from aiida.common.exceptions import NotExistent +from aiida.orm import ( + Dict, + KpointsData, + StructureData, + load_code, + load_group, + InstalledCode, + load_computer, +) +from ase.build import molecule +from ase import Atoms + +# +load_profile() +# create pw code +try: + pw_code = load_code( + "qe-7.2-pw@localhost" + ) # The computer label can also be omitted here +except NotExistent: + pw_code = InstalledCode( + computer=load_computer("localhost"), + filepath_executable="pw.x", + label="qe-7.2-pw", + default_calc_job_plugin="quantumespresso.pw", + ).store() +# create structure +n_atom = Atoms("N") +n_atom.center(vacuum=1.5) +n_atom.pbc = True +structure_n = StructureData(ase=n_atom) +structure_n2 = StructureData(ase=molecule("N2", vacuum=1.5, pbc=True)) +# create the PW task +paras = Dict( + { + "CONTROL": { + "calculation": "scf", + }, + "SYSTEM": { + "ecutwfc": 30, + "ecutrho": 240, + "occupations": "smearing", + "smearing": "gaussian", + "degauss": 0.1, + }, + } +) +kpoints = KpointsData() +kpoints.set_kpoints_mesh([1, 1, 1]) +# Load the pseudopotential family. 
+pseudo_family = load_group("SSSP/1.3/PBEsol/efficiency") +pseudos = pseudo_family.get_pseudos(structure=structure_n2) +# +metadata = { + "options": { + "resources": { + "num_machines": 1, + "num_mpiprocs_per_machine": 1, + }, + } +} +# +# ------------------------- Set the inputs ------------------------- +wg.tasks["pw_atom"].set( + { + "code": pw_code, + "structure": structure_n, + "parameters": paras, + "kpoints": kpoints, + "pseudos": pseudos, + "metadata": metadata, + } +) +wg.tasks["pw_mol"].set( + { + "code": pw_code, + "structure": structure_n2, + "parameters": paras, + "kpoints": kpoints, + "pseudos": pseudos, + "metadata": metadata, + } +) +# ------------------------- Submit the calculation ------------------- +wg.submit(wait=True, timeout=200) +# ------------------------- Print the output ------------------------- +print( + "Energy of a N atom: {:0.3f}".format( + wg.tasks["pw_atom"].outputs["output_parameters"].value.get_dict()["energy"] + ) +) +print( + "Energy of an un-relaxed N2 molecule: {:0.3f}".format( + wg.tasks["pw_mol"].outputs["output_parameters"].value.get_dict()["energy"] + ) +) +print( + "Atomization energy: {:0.3f} eV".format( + wg.tasks["atomization_energy"].outputs["result"].value.value + ) +) +# + +# %% +# Generate node graph from the AiiDA process: +# + + +from aiida_workgraph.utils import generate_node_graph + +generate_node_graph(wg.pk) +# + +# %% +# Advanced Topic: Dynamic Workgraph +# ================================== +# +# Graph builder +# -------------- +# If we want to generate the workgraph on-the-fly, for example, if you want to use `if` to create the tasks, or repeat a calculation until it converges, you can use Graph Builder. +# +# Suppose we want to calculate: +# +# .. code-block:: python +# +# # step 1 +# result = add(x, y) +# # step 2 +# if result > 0: +# result = add(result, y) +# else: +# result = multiply(result, y) +# # step 3 +# result = add(result, y) +# + + +# Create a WorkGraph which is dynamically generated based on the input +# then we output the result of from the context +from aiida_workgraph import task + +# +@task.graph_builder(outputs=[{"name": "result", "from": "context.result"}]) +def add_multiply_if_generator(x, y): + wg = WorkGraph() + if x.value > 0: + add1 = wg.add_task(add, name="add1", x=x, y=y) + # export the result of add1 to the context, so that context.result = add1.results + add1.set_context({"result": "result"}) + else: + multiply1 = wg.add_task(multiply, name="multiply1", x=x, y=y) + # export the result of multiply1 to the context + multiply1.set_context({"result": "result"}) + return wg + + +# +wg = WorkGraph("if_task") +wg.add_task(add, name="add1") +wg.add_task( + add_multiply_if_generator, + name="add_multiply_if1", + x=wg.tasks["add1"].outputs["result"], +) +wg.add_task(add, name="add2", x=wg.tasks["add_multiply_if1"].outputs["result"]) +wg.to_html() +# + +# %% +# Submit the WorkGraph + + +wg.submit( + inputs={ + "add1": {"x": 1, "y": 2}, + "add_multiply_if1": {"y": 2}, + "add2": {"y": 2}, + }, + wait=True, +) +# ------------------------- Print the output ------------------------- +assert wg.tasks["add2"].outputs["result"].value == 7 +print("\nResult of add2 is {} \n\n".format(wg.tasks["add2"].outputs["result"].value)) +# +# %% +# Note: one can not see the detail of the `add_multiply_if1` before you running it. +# +# Second Real-world Workflow: Equation of state (EOS) WorkGraph +# ============================================================= +# +# First, create the calcfunction for the job. 
+# + +from aiida import orm +from aiida_workgraph import task + +# +# explicitly define the output socket name to match the return value of the function +@task.calcfunction(outputs=[{"name": "structures"}]) +def scale_structure(structure, scales): + """Scale the structure by the given scales.""" + atoms = structure.get_ase() + structures = {} + for i in range(len(scales)): + atoms1 = atoms.copy() + atoms1.set_cell(atoms.cell * scales[i], scale_atoms=True) + structure = orm.StructureData(ase=atoms1) + structures[f"s_{i}"] = structure + return {"structures": structures} + + +# +# Output result from context to the output socket +@task.graph_builder(outputs=[{"name": "result", "from": "context.result"}]) +def all_scf(structures, scf_inputs): + """Run the scf calculation for each structure.""" + from aiida_workgraph import WorkGraph + from aiida_quantumespresso.calculations.pw import PwCalculation + + wg = WorkGraph() + for key, structure in structures.items(): + pw1 = wg.add_task(PwCalculation, name=f"pw1_{key}", structure=structure) + pw1.set(scf_inputs) + # save the output parameters to the context + pw1.set_context({"output_parameters": f"result.{key}"}) + return wg + + +# + + +@task.calcfunction() +# because this is a calcfunction, and the input datas are dynamic, we need use **datas. +def eos(**datas): + """Fit the EOS of the data.""" + from ase.eos import EquationOfState + + # + volumes = [] + energies = [] + for _, data in datas.items(): + volumes.append(data.dict.volume) + energies.append(data.dict.energy) + unit = data.dict.energy_units + # + eos = EquationOfState(volumes, energies) + v0, e0, B = eos.fit() + eos = orm.Dict({"unit": unit, "v0": v0, "e0": e0, "B": B}) + return eos + + +# %% +# Define the WorkGraph +# ---------------------- +# + + +from aiida_workgraph import WorkGraph + +# +wg = WorkGraph("eos") +scale_structure1 = wg.add_task(scale_structure, name="scale_structure1") +all_scf1 = wg.add_task( + all_scf, name="all_scf1", structures=scale_structure1.outputs["structures"] +) +eos1 = wg.add_task(eos, name="eos1", datas=all_scf1.outputs["result"]) +wg.to_html() +# + +# %% +# Combine with a relax task +# -------------------------- +# + + +from aiida_workgraph import WorkGraph, task +from aiida_quantumespresso.calculations.pw import PwCalculation + +# +@task.graph_builder(outputs=[{"name": "result", "from": "eos1.result"}]) +def eos_workgraph(structure=None, scales=None, scf_inputs=None): + wg = WorkGraph("eos") + scale_structure1 = wg.add_task( + scale_structure, name="scale_structure1", structure=structure, scales=scales + ) + all_scf1 = wg.add_task(all_scf, name="all_scf1", scf_inputs=scf_inputs) + eos1 = wg.add_task(eos, name="eos1") + wg.add_link(scale_structure1.outputs["structures"], all_scf1.inputs["structures"]) + wg.add_link(all_scf1.outputs["result"], eos1.inputs["datas"]) + return wg + + +# + +# ------------------------------------------------------- +wg = WorkGraph("relax_eos") +relax_task = wg.add_task(PwCalculation, name="relax1") +eos_wg_task = wg.add_task( + eos_workgraph, name="eos1", structure=relax_task.outputs["output_structure"] +) +wg.to_html() + + +# %% +# Useful tool: Web GUI +# ===================== +# Open a terminal, and run: +# +# .. code-block:: console +# +# workgraph web start +# +# Then visit the page `http://127.0.0.1:8000/workgraph`, where you can view all the workgraphs. 
+# +# What's Next +# =========== +# +# +-----------------------------------------------------+-----------------------------------------------------------------------------+ +# | `Concepts <../../concept/index.html>`_ | A brief introduction of WorkGraph’s main concepts. | +# +-----------------------------------------------------+-----------------------------------------------------------------------------+ +# | `HowTo <../../howto/index.html>`_ | Advanced topics, e.g., flow control using `if`, `while`, and `context`. | +# +-----------------------------------------------------+-----------------------------------------------------------------------------+ +# diff --git a/docs/sections/writing_workflows_with_workgraph/eos.ipynb b/docs/sections/writing_workflows_with_workgraph/eos.ipynb deleted file mode 100644 index 6408030d..00000000 --- a/docs/sections/writing_workflows_with_workgraph/eos.ipynb +++ /dev/null @@ -1,361 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "7e9c7e1d", - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"\n", - "==================================\n", - "Equation of state (EOS) WorkGraph\n", - "==================================\n", - "\n", - "\"\"\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ce0adaf6", - "metadata": {}, - "outputs": [], - "source": [ - "# To run this tutorial, you need to install aiida-workgraph and restart the daemon. Open a terminal and run:\n", - "#\n", - "# .. code-block:: console\n", - "#\n", - "# pip install aiida-workgraph[widget] aiida-quantumespresso\n", - "#\n", - "# Restart (or start) the AiiDA daemon if needed:\n", - "#\n", - "# .. code-block:: console\n", - "#\n", - "# verdi daemon restart\n", - "#\n", - "# Create the calcfunction task\n", - "# ============================\n", - "#\n", - "\n", - "\n", - "from aiida import orm\n", - "from aiida_workgraph import task\n", - "\n", - "#\n", - "# explicitly define the output socket name to match the return value of the function\n", - "@task.calcfunction(outputs=[{\"name\": \"structures\"}])\n", - "def scale_structure(structure, scales):\n", - " \"\"\"Scale the structure by the given scales.\"\"\"\n", - " atoms = structure.get_ase()\n", - " structures = {}\n", - " for i in range(len(scales)):\n", - " atoms1 = atoms.copy()\n", - " atoms1.set_cell(atoms.cell * scales[i], scale_atoms=True)\n", - " structure = orm.StructureData(ase=atoms1)\n", - " structures[f\"s_{i}\"] = structure\n", - " return {\"structures\": structures}\n", - "\n", - "\n", - "#\n", - "# Output result from context to the output socket\n", - "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"context.result\"}])\n", - "def all_scf(structures, scf_inputs):\n", - " \"\"\"Run the scf calculation for each structure.\"\"\"\n", - " from aiida_workgraph import WorkGraph\n", - " from aiida_quantumespresso.calculations.pw import PwCalculation\n", - "\n", - " wg = WorkGraph()\n", - " for key, structure in structures.items():\n", - " pw1 = wg.add_task(PwCalculation, name=f\"pw1_{key}\", structure=structure)\n", - " pw1.set(scf_inputs)\n", - " # save the output parameters to the context\n", - " pw1.set_context({\"output_parameters\": f\"result.{key}\"})\n", - " return wg\n", - "\n", - "\n", - "#\n", - "\n", - "\n", - "@task.calcfunction()\n", - "# because this is a calcfunction, and the input datas are dynamic, we need use **datas.\n", - "def eos(**datas):\n", - " \"\"\"Fit the EOS of the data.\"\"\"\n", - " from ase.eos import EquationOfState\n", - "\n", 
- " #\n", - " volumes = []\n", - " energies = []\n", - " for _, data in datas.items():\n", - " volumes.append(data.dict.volume)\n", - " energies.append(data.dict.energy)\n", - " unit = data.dict.energy_units\n", - " #\n", - " eos = EquationOfState(volumes, energies)\n", - " v0, e0, B = eos.fit()\n", - " eos = orm.Dict({\"unit\": unit, \"v0\": v0, \"e0\": e0, \"B\": B})\n", - " return eos" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "606b1848", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Build the workgraph\n", - "# ===================\n", - "# Three steps:\n", - "#\n", - "# - create an empty WorkGraph\n", - "# - add tasks: scale_structure, all_scf and eos.\n", - "# - link the output and input sockets for the tasks.\n", - "#\n", - "# Visualize the workgraph\n", - "# -----------------------\n", - "# If you are running in a jupyter notebook, you can visualize the workgraph directly.\n", - "#\n", - "\n", - "from aiida_workgraph import WorkGraph\n", - "\n", - "#\n", - "wg = WorkGraph(\"eos\")\n", - "scale_structure1 = wg.add_task(scale_structure, name=\"scale_structure1\")\n", - "all_scf1 = wg.add_task(all_scf, name=\"all_scf1\")\n", - "eos1 = wg.add_task(eos, name=\"eos1\")\n", - "wg.add_link(scale_structure1.outputs[\"structures\"], all_scf1.inputs[\"structures\"])\n", - "wg.add_link(all_scf1.outputs[\"result\"], eos1.inputs[\"datas\"])\n", - "wg.to_html()\n", - "# visualize the workgraph in jupyter-notebook\n", - "# wg" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a9d5c260", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Prepare inputs and run\n", - "# ----------------------\n", - "#\n", - "\n", - "\n", - "from aiida import load_profile\n", - "from aiida.common.exceptions import NotExistent\n", - "from aiida.orm import (\n", - " Dict,\n", - " KpointsData,\n", - " StructureData,\n", - " load_code,\n", - " load_group,\n", - " InstalledCode,\n", - " load_computer,\n", - ")\n", - "from ase.build import bulk\n", - "\n", - "#\n", - "load_profile()\n", - "# create pw code\n", - "try:\n", - " pw_code = load_code(\n", - " \"qe-7.2-pw@localhost\"\n", - " ) # The computer label can also be omitted here\n", - "except NotExistent:\n", - " pw_code = InstalledCode(\n", - " computer=load_computer(\"localhost\"),\n", - " filepath_executable=\"pw.x\",\n", - " label=\"qe-7.2-pw\",\n", - " default_calc_job_plugin=\"quantumespresso.pw\",\n", - " ).store()\n", - "#\n", - "si = orm.StructureData(ase=bulk(\"Si\"))\n", - "pw_paras = Dict(\n", - " {\n", - " \"CONTROL\": {\n", - " \"calculation\": \"scf\",\n", - " },\n", - " \"SYSTEM\": {\n", - " \"ecutwfc\": 30,\n", - " \"ecutrho\": 240,\n", - " \"occupations\": \"smearing\",\n", - " \"smearing\": \"gaussian\",\n", - " \"degauss\": 0.1,\n", - " },\n", - " }\n", - ")\n", - "# Load the pseudopotential family.\n", - "pseudo_family = load_group(\"SSSP/1.3/PBEsol/efficiency\")\n", - "pseudos = pseudo_family.get_pseudos(structure=si)\n", - "#\n", - "metadata = {\n", - " \"options\": {\n", - " \"resources\": {\n", - " \"num_machines\": 1,\n", - " \"num_mpiprocs_per_machine\": 1,\n", - " },\n", - " }\n", - "}\n", - "#\n", - "kpoints = orm.KpointsData()\n", - "kpoints.set_kpoints_mesh([3, 3, 3])\n", - "pseudos = pseudo_family.get_pseudos(structure=si)\n", - "scf_inputs = {\n", - " \"code\": pw_code,\n", - " \"parameters\": pw_paras,\n", - " \"kpoints\": kpoints,\n", - " \"pseudos\": pseudos,\n", - " \"metadata\": metadata,\n", - "}\n", - "# 
-------------------------------------------------------\n", - "# set the input parameters for each task\n", - "wg.tasks[\"scale_structure1\"].set({\"structure\": si, \"scales\": [0.95, 1.0, 1.05]})\n", - "wg.tasks[\"all_scf1\"].set({\"scf_inputs\": scf_inputs})\n", - "print(\"Waiting for the workgraph to finish...\")\n", - "wg.submit(wait=True, timeout=300)\n", - "# one can also run the workgraph directly\n", - "# wg.run()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "be53a0ec", - "metadata": {}, - "outputs": [], - "source": [ - "# Print out the results:\n", - "#\n", - "\n", - "\n", - "data = wg.tasks[\"eos1\"].outputs[\"result\"].value.get_dict()\n", - "print(\"B: {B}\\nv0: {v0}\\ne0: {e0}\\nv0: {v0}\".format(**data))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4e68219c", - "metadata": {}, - "outputs": [], - "source": [ - "# Use graph builder\n", - "# =================\n", - "# The Graph Builder allow user to create a dynamic workflow based on the input value, as well as nested workflows.\n", - "#\n", - "\n", - "from aiida_workgraph import WorkGraph, task\n", - "\n", - "#\n", - "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"eos1.result\"}])\n", - "def eos_workgraph(structure=None, scales=None, scf_inputs=None):\n", - " wg = WorkGraph(\"eos\")\n", - " scale_structure1 = wg.add_task(\n", - " scale_structure, name=\"scale_structure1\", structure=structure, scales=scales\n", - " )\n", - " all_scf1 = wg.add_task(all_scf, name=\"all_scf1\", scf_inputs=scf_inputs)\n", - " eos1 = wg.add_task(eos, name=\"eos1\")\n", - " wg.add_link(scale_structure1.outputs[\"structures\"], all_scf1.inputs[\"structures\"])\n", - " wg.add_link(all_scf1.outputs[\"result\"], eos1.inputs[\"datas\"])\n", - " return wg" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e4062238", - "metadata": {}, - "outputs": [], - "source": [ - "# Then we can use the `eos_workgraph` in two ways:\n", - "#\n", - "# - Direct run the function and generate the workgraph, then submit\n", - "# - Use it as a task inside another workgraph to create nested workflow.\n", - "#\n", - "# Use the graph builder directly\n", - "# ------------------------------\n", - "#\n", - "\n", - "wg = eos_workgraph(structure=si, scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs)\n", - "# One can submit the workgraph directly\n", - "# wg.submit(wait=True, timeout=300)\n", - "wg.to_html()\n", - "# visualize the workgraph in jupyter-notebook\n", - "# wg" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8aad9026", - "metadata": {}, - "outputs": [], - "source": [ - "# Use it inside another workgraph\n", - "# -------------------------------\n", - "# For example, we want to combine relax with eos.\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph import WorkGraph\n", - "from copy import deepcopy\n", - "from aiida_quantumespresso.calculations.pw import PwCalculation\n", - "\n", - "#\n", - "# -------------------------------------------------------\n", - "relax_pw_paras = deepcopy(pw_paras)\n", - "relax_pw_paras[\"CONTROL\"][\"calculation\"] = \"vc-relax\"\n", - "relax_inputs = {\n", - " \"structure\": si,\n", - " \"code\": pw_code,\n", - " \"parameters\": relax_pw_paras,\n", - " \"kpoints\": kpoints,\n", - " \"pseudos\": pseudos,\n", - " \"metadata\": metadata,\n", - "}\n", - "# -------------------------------------------------------\n", - "wg = WorkGraph(\"relax_eos\")\n", - "relax_task = wg.add_task(PwCalculation, name=\"relax1\")\n", - 
"relax_task.set(relax_inputs)\n", - "eos_wg_task = wg.add_task(\n", - " eos_workgraph, name=\"eos1\", scales=[0.95, 1.0, 1.05], scf_inputs=scf_inputs\n", - ")\n", - "wg.add_link(relax_task.outputs[\"output_structure\"], eos_wg_task.inputs[\"structure\"])\n", - "# -------------------------------------------------------\n", - "# One can submit the workgraph directly\n", - "# wg.submit(wait=True, timeout=300)\n", - "\n", - "wg.to_html()\n", - "# visualize the workgraph in jupyter-notebook\n", - "# wg" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "bf39d602", - "metadata": {}, - "outputs": [], - "source": [ - "# Summary\n", - "# =======\n", - "# There are many ways to create the workflow using graph builder. For example, one can add the relax step inside the `eos_workgraph`, and add a `run_relax` argument to control the logic." - ] - } - ], - "metadata": { - "jupytext": { - "cell_metadata_filter": "-all", - "main_language": "python", - "notebook_metadata_filter": "-all" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/sections/writing_workflows_with_workgraph/index.rst b/docs/sections/writing_workflows_with_workgraph/index.rst index 8e7c2593..3314cbbd 100644 --- a/docs/sections/writing_workflows_with_workgraph/index.rst +++ b/docs/sections/writing_workflows_with_workgraph/index.rst @@ -1,97 +1,11 @@ -Writing workflows -================= +Writing workflows with WorkGraph +================================ -.. rst-class:: header-text - - Recently a new way to create workflows has been developed, the *workgraph*. - The workgraph should simplify the creation of the and provide a more user - friendly GUI that provides information about your workflow before - execution. In this section you will learn how to write different examples - with the workgraph. - -.. panels:: - :header: panel-header-text - :body: bg-light - :footer: bg-light border-0 - - ------ - :column: col-lg-12 - - .. link-button:: zero_to_hero - :type: ref - :text: Zero to hero - :classes: btn-light text-left stretched-link font-weight-bold - ^^^^^^^^^^^^ - - A short module on how to write the basic type of workflows in AiiDA: work functions. - The module also revises the usage of calculation functions to add simple Python functions to the provenance. - - +++++++++++++ - .. list-table:: - :widths: 50 50 - :class: footer-table - :header-rows: 0 - - * - |time| 30 min - - |aiida| :aiida-green:`Basic` - -.. panels:: - :header: panel-header-text - :body: bg-light - :footer: bg-light border-0 - - ------ - :column: col-lg-12 - - .. link-button:: qe - :type: ref - :text: Computational materials science - :classes: btn-light text-left stretched-link font-weight-bold - ^^^^^^^^^^^^ - - A step-by-step introduction to the basics of writing work chains in AiiDA. - After completing this module, you will be ready to start writing your own scientific workflows! - - +++++++++++++ - .. list-table:: - :widths: 50 50 - :class: footer-table - :header-rows: 0 - - * - |time| 60 min - - |aiida| :aiida-green:`Intermediate` - - -.. panels:: - :header: panel-header-text - :body: bg-light - :footer: bg-light border-0 - - ------ - :column: col-lg-12 - - .. link-button:: eos - :type: ref - :text: A Real-world example - Equation of state - :classes: btn-light text-left stretched-link font-weight-bold - ^^^^^^^^^^^^ - - A step-by-step introduction to the basics of writing work chains in AiiDA. - After completing this module, you will be ready to start writing your own scientific workflows! - - +++++++++++++ - .. 
list-table:: - :widths: 50 50 - :class: footer-table - :header-rows: 0 - - * - |time| 60 min - - |aiida| :aiida-green:`Intermediate` .. toctree:: :hidden: - zero_to_hero - qe - eos + autogen/zero_to_hero + autogen/qe + autogen/eos diff --git a/docs/sections/writing_workflows_with_workgraph/qe.ipynb b/docs/sections/writing_workflows_with_workgraph/qe.ipynb deleted file mode 100644 index eb21bb63..00000000 --- a/docs/sections/writing_workflows_with_workgraph/qe.ipynb +++ /dev/null @@ -1,476 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "058c3541", - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"\n", - "================================\n", - "Computational materials science\n", - "================================\n", - "\n", - "\"\"\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "068c3108", - "metadata": { - "lines_to_next_cell": 0 - }, - "outputs": [], - "source": [ - "# Introduction\n", - "# ============\n", - "# In this tutorial, you will use `AiiDA-WorkGraph` to carry out a DFT calculation using Quantum ESPRESSO.\n", - "#\n", - "# Requirements\n", - "# ------------\n", - "# To run this tutorial, you need to install `aiida-workgraph`, `aiida-quantumespresso` and `aiida-pseudo`. Open a terminal and run:\n", - "#\n", - "# .. code-block:: console\n", - "#\n", - "# pip install aiida-workgraph aiida-quantumespresso aiida-pseudo\n", - "# aiida-pseudo install sssp -x PBEsol\n", - "#\n", - "# Start the AiiDA daemon if needed:\n", - "#\n", - "# .. code-block:: console\n", - "#\n", - "# verdi daemon start\n", - "#\n", - "# Start the web server\n", - "# --------------------\n", - "#\n", - "# Open a terminal, and run:\n", - "#\n", - "# .. code-block:: console\n", - "#\n", - "# workgraph web start\n", - "#\n", - "# Then visit the page `http://127.0.0.1:8000/workgraph`, where you can view the workgraph later.\n", - "#\n", - "# Load the AiiDA profile.\n", - "#\n", - "\n", - "\n", - "from aiida import load_profile\n", - "\n", - "load_profile()\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e5762b39", - "metadata": {}, - "outputs": [], - "source": [ - "# First workflow: calculate the energy of N2 molecule\n", - "# ===================================================\n", - "# Define a workgraph\n", - "# -------------------\n", - "# aiida-quantumespresso provides a CalcJob: `PwCalculation` to run a PW calculation. we can use it directly in the WorkGraph. 
The inputs and outputs of the task is automatically generated based on the `PwCalculation` CalcJob.\n", - "#\n", - "\n", - "from aiida_quantumespresso.calculations.pw import PwCalculation\n", - "from aiida_workgraph import WorkGraph\n", - "\n", - "#\n", - "wg = WorkGraph(\"energy_n2\")\n", - "pw1 = wg.add_task(PwCalculation, name=\"pw1\")\n", - "pw1.to_html()\n", - "#\n", - "# visualize the task in jupyter-notebook\n", - "# pw1\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "00ec9e67", - "metadata": {}, - "outputs": [], - "source": [ - "# Prepare the inputs and submit the workflow\n", - "# ------------------------------------------\n", - "#\n", - "#\n", - "\n", - "from aiida import load_profile\n", - "from aiida.common.exceptions import NotExistent\n", - "from aiida.orm import (\n", - " Dict,\n", - " KpointsData,\n", - " StructureData,\n", - " load_code,\n", - " load_group,\n", - " InstalledCode,\n", - " load_computer,\n", - ")\n", - "from ase.build import molecule\n", - "\n", - "#\n", - "load_profile()\n", - "# create pw code\n", - "try:\n", - " pw_code = load_code(\n", - " \"qe-7.2-pw@localhost\"\n", - " ) # The computer label can also be omitted here\n", - "except NotExistent:\n", - " pw_code = InstalledCode(\n", - " computer=load_computer(\"localhost\"),\n", - " filepath_executable=\"pw.x\",\n", - " label=\"qe-7.2-pw\",\n", - " default_calc_job_plugin=\"quantumespresso.pw\",\n", - " ).store()\n", - "# create input structure\n", - "mol = molecule(\"N2\")\n", - "mol.center(vacuum=1.5)\n", - "mol.pbc = True\n", - "structure_n2 = StructureData(ase=mol)\n", - "paras = Dict(\n", - " {\n", - " \"CONTROL\": {\n", - " \"calculation\": \"scf\",\n", - " },\n", - " \"SYSTEM\": {\n", - " \"ecutwfc\": 30,\n", - " \"ecutrho\": 240,\n", - " \"occupations\": \"smearing\",\n", - " \"smearing\": \"gaussian\",\n", - " \"degauss\": 0.1,\n", - " },\n", - " }\n", - ")\n", - "kpoints = KpointsData()\n", - "kpoints.set_kpoints_mesh([1, 1, 1])\n", - "# Load the pseudopotential family.\n", - "pseudo_family = load_group(\"SSSP/1.3/PBEsol/efficiency\")\n", - "pseudos = pseudo_family.get_pseudos(structure=structure_n2)\n", - "#\n", - "metadata = {\n", - " \"options\": {\n", - " \"resources\": {\n", - " \"num_machines\": 1,\n", - " \"num_mpiprocs_per_machine\": 1,\n", - " },\n", - " }\n", - "}\n", - "#\n", - "# ------------------------- Set the inputs -------------------------\n", - "pw1.set(\n", - " {\n", - " \"code\": pw_code,\n", - " \"structure\": structure_n2,\n", - " \"parameters\": paras,\n", - " \"kpoints\": kpoints,\n", - " \"pseudos\": pseudos,\n", - " \"metadata\": metadata,\n", - " }\n", - ")\n", - "# ------------------------- Submit the calculation -------------------------\n", - "wg.submit(wait=True, timeout=200)\n", - "# ------------------------- Print the output -------------------------\n", - "print(\n", - " \"Energy of an un-relaxed N2 molecule: {:0.3f}\".format(\n", - " pw1.outputs[\"output_parameters\"].value.get_dict()[\"energy\"]\n", - " )\n", - ")\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "546d7f16", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Generate node graph from the AiiDA process:\n", - "#\n", - "\n", - "from aiida_workgraph.utils import generate_node_graph\n", - "\n", - "generate_node_graph(wg.pk)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "fbf6e16a", - "metadata": {}, - "outputs": [], - "source": [ - "# Second workflow: atomization energy of N2 
molecule\n", - "# ==================================================\n", - "#\n", - "# The atomization energy of :math:`N_2` is defined as the energy difference between the :math:`N_2` molecule and two isolated N atoms.\n", - "#\n", - "# .. code-block:: python\n", - "#\n", - "# e_atomization = 2 * e_atom - e_molecule\n", - "\n", - "# Define a calcfunction to calculate the atomization energy\n", - "# ---------------------------------------------------------\n", - "#\n", - "\n", - "from aiida_workgraph import task\n", - "\n", - "#\n", - "@task.calcfunction()\n", - "def atomization_energy(output_atom, output_mol):\n", - " from aiida.orm import Float\n", - "\n", - " e = output_atom[\"energy\"] * output_mol[\"number_of_atoms\"] - output_mol[\"energy\"]\n", - " return Float(e)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "0d72cdd4", - "metadata": {}, - "outputs": [], - "source": [ - "# Create the structure of nitrogen Atom.\n", - "#\n", - "\n", - "from ase import Atoms\n", - "from aiida.orm import StructureData\n", - "\n", - "#\n", - "atoms = Atoms(\"N\")\n", - "atoms.center(vacuum=1.5)\n", - "atoms.pbc = True\n", - "structure_n = StructureData(ase=atoms)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "9b29ba1b", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Create a workgraph\n", - "# ------------------\n", - "\n", - "\n", - "from aiida_workgraph import WorkGraph\n", - "from aiida.orm import load_code\n", - "\n", - "#\n", - "# load the PW code\n", - "pw_code = load_code(\"qe-7.2-pw@localhost\")\n", - "#\n", - "wg = WorkGraph(\"atomization_energy\")\n", - "#\n", - "# create the PW task\n", - "pw_n = wg.add_task(PwCalculation, name=\"pw_n\")\n", - "pw_n.set(\n", - " {\n", - " \"code\": pw_code,\n", - " \"structure\": structure_n,\n", - " \"parameters\": paras,\n", - " \"kpoints\": kpoints,\n", - " \"pseudos\": pseudos,\n", - " \"metadata\": metadata,\n", - " }\n", - ")\n", - "pw_n2 = wg.add_task(PwCalculation, name=\"pw_n2\")\n", - "pw_n2.set(\n", - " {\n", - " \"code\": pw_code,\n", - " \"structure\": structure_n2,\n", - " \"parameters\": paras,\n", - " \"kpoints\": kpoints,\n", - " \"pseudos\": pseudos,\n", - " \"metadata\": metadata,\n", - " }\n", - ")\n", - "# create the task to calculate the atomization energy\n", - "atomization = wg.add_task(atomization_energy, name=\"atomization_energy\")\n", - "wg.add_link(pw_n.outputs[\"output_parameters\"], atomization.inputs[\"output_atom\"])\n", - "wg.add_link(pw_n2.outputs[\"output_parameters\"], atomization.inputs[\"output_mol\"])\n", - "wg.to_html()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e4fb27b4", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Submit the workgraph and print the atomization energy.\n", - "#\n", - "\n", - "\n", - "wg.submit(wait=True, timeout=300)\n", - "print(\n", - " \"Atomization energy: {:0.3f} eV\".format(atomization.outputs[\"result\"].value.value)\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "e94e6744", - "metadata": {}, - "outputs": [], - "source": [ - "# If you start the web app (`workgraph web start`), you can visit the page http://127.0.0.1:8000/workgraph to view the tasks.\n", - "#\n", - "# You can also generate node graph from the AiiDA process:\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph.utils import generate_node_graph\n", - "\n", - "generate_node_graph(wg.pk)" - ] - }, - { - "cell_type": "code", - "execution_count": 
null, - "id": "6b6ee792", - "metadata": {}, - "outputs": [], - "source": [ - "# Use already existing workchain\n", - "# ===============================\n", - "# Can we register a task from a workchain? Can we set the a input item of a namespace? Yes, we can!\n", - "#\n", - "# In the `PwRelaxWorkChain`, one can set the relax type (`calculation` key) in the input namespace `base.pw.parameters`. Now we create a new task to update the pw parameters.\n", - "#\n", - "\n", - "from aiida_workgraph import task\n", - "\n", - "\n", - "@task.calcfunction()\n", - "def pw_parameters(paras, relax_type):\n", - " paras1 = paras.clone()\n", - " paras1[\"CONTROL\"][\"calculation\"] = relax_type\n", - " return paras1" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1653b3aa", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Now, we create the workgraph to relax the structure of N2 molecule.\n", - "#\n", - "\n", - "from aiida_quantumespresso.workflows.pw.relax import PwRelaxWorkChain\n", - "\n", - "#\n", - "wg = WorkGraph(\"test_pw_relax\")\n", - "# pw task\n", - "pw_relax1 = wg.add_task(PwRelaxWorkChain, name=\"pw_relax1\")\n", - "# Load the pseudopotential family.\n", - "pseudos = pseudo_family.get_pseudos(structure=structure_n2)\n", - "pw_relax1.set(\n", - " {\n", - " \"base\": {\n", - " \"pw\": {\"code\": pw_code, \"pseudos\": pseudos, \"metadata\": metadata},\n", - " \"kpoints\": kpoints,\n", - " },\n", - " \"structure\": structure_n2,\n", - " },\n", - ")\n", - "paras_task = wg.add_task(pw_parameters, \"parameters\", paras=paras, relax_type=\"relax\")\n", - "wg.add_link(paras_task.outputs[0], pw_relax1.inputs[\"base.pw.parameters\"])\n", - "# One can submit the workgraph directly\n", - "# wg.submit(wait=True, timeout=200)\n", - "# print(\n", - "# \"\\nEnergy of a relaxed N2 molecule: {:0.3f}\".format(\n", - "# pw_relax1.node.outputs.output_parameters.get_dict()[\"energy\"]\n", - "# )\n", - "# )" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a5c735ff", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Use `protocol` to set input parameters (Experimental)\n", - "# ====================================================\n", - "# The aiida-quantumespresso package supports setting input parameters from protocol. For example, the PwRelaxWorkChain has a `get_builder_from_protocol` method. 
In this tutorial, we will show how to use the `protocol` to set the input parameters inside the WorkGraph.\n", - "#\n", - "\n", - "from aiida_workgraph import build_task, WorkGraph\n", - "from aiida_quantumespresso.workflows.pw.relax import PwRelaxWorkChain\n", - "from ase.build import bulk\n", - "from aiida import orm\n", - "from pprint import pprint\n", - "\n", - "#\n", - "pw_code = orm.load_code(\"qe-7.2-pw@localhost\")\n", - "wg = WorkGraph(\"test_pw_relax\")\n", - "structure_si = orm.StructureData(ase=bulk(\"Si\"))\n", - "pw_relax1 = wg.add_task(PwRelaxWorkChain, name=\"pw_relax1\")\n", - "# set the inputs from the protocol\n", - "# this will call the `PwRelaxWorkChain.get_builder_from_protocol` method\n", - "# to set the inputs of the workchain\n", - "pw_relax1.set_from_protocol(\n", - " pw_code, structure_si, protocol=\"fast\", pseudo_family=\"SSSP/1.2/PBEsol/efficiency\"\n", - ")\n", - "# we can now inspect the inputs of the workchain\n", - "print(\"The inputs for the PwBaseWorkchain are:\")\n", - "print(\"-\" * 80)\n", - "pprint(pw_relax1.inputs[\"base\"].value)\n", - "print(\"\\nThe input parameters for pw are:\")\n", - "print(\"-\" * 80)\n", - "pprint(pw_relax1.inputs[\"base\"].value[\"pw\"][\"parameters\"].get_dict())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7c6bb4f5", - "metadata": {}, - "outputs": [], - "source": [ - "# One can also adjust the parameters of the `PwRelaxWorkChain` to from protocol.\n", - "#\n", - "\n", - "# For example, we want to remove the `base_final_scf` from the inputs, so that the `PwRelaxWorkChain` will not run the `base_final_scf` step.\n", - "pw_relax1.inputs[\"base_final_scf\"].value = None\n", - "# submit the workgraph\n", - "# wg.submit(wait=True, timeout=200)" - ] - } - ], - "metadata": { - "jupytext": { - "cell_metadata_filter": "-all", - "main_language": "python", - "notebook_metadata_filter": "-all" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb b/docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb deleted file mode 100644 index f86153e4..00000000 --- a/docs/sections/writing_workflows_with_workgraph/zero_to_hero.ipynb +++ /dev/null @@ -1,658 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "ee2af78b", - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"\n", - "======================================\n", - "AiiDA-WorkGraph: From Zero To Hero\n", - "======================================\n", - "\n", - "\"\"\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ac4c08ae", - "metadata": {}, - "outputs": [], - "source": [ - "# In this tutorial, you will learn `AiiDA-WorkGraph` to build your workflow to carry out DFT calculation. It's recommended to run this tutorial inside a Jupyter notebook.\n", - "#\n", - "# Requirements\n", - "# ===============\n", - "# To run this tutorial, you need to install `aiida-workgraph`, `aiida-quantumespresso`. Open a terminal and run:\n", - "#\n", - "# .. code-block:: console\n", - "#\n", - "# pip install aiida-workgraph[widget] aiida-quantumespresso\n", - "#\n", - "# Restart (or start) the AiiDA daemon if needed:\n", - "#\n", - "# .. 
code-block:: console\n", - "#\n", - "# verdi daemon restart\n", - "#\n", - "# Load the AiiDA profile.\n", - "#\n", - "\n", - "\n", - "from aiida import load_profile\n", - "\n", - "load_profile()\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6f014608", - "metadata": {}, - "outputs": [], - "source": [ - "# First workflow\n", - "# ===============\n", - "# Suppose we want to calculate ```(x + y) * z ``` in two steps. First, add `x` and `y`, then multiply the result with `z`.\n", - "#\n", - "# In AiiDA, we can define two `calcfunction` to do the `add` and `mutiply`:\n", - "#\n", - "\n", - "from aiida_workgraph import task\n", - "\n", - "\n", - "@task.calcfunction()\n", - "def add(x, y):\n", - " return x + y\n", - "\n", - "\n", - "@task.calcfunction()\n", - "def multiply(x, y):\n", - " return x * y" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "734ce56a", - "metadata": {}, - "outputs": [], - "source": [ - "# Create the workflow\n", - "# --------------------\n", - "# Three steps:\n", - "#\n", - "# - create a empty WorkGraph\n", - "# - add tasks: `add` and `multiply`.\n", - "# - link the output of the `add` task to the `x` input of the `multiply` task.\n", - "#\n", - "#\n", - "# In a jupyter notebook, you can visualize the workgraph directly.\n", - "#\n", - "\n", - "from aiida_workgraph import WorkGraph\n", - "\n", - "#\n", - "wg = WorkGraph(\"add_multiply_workflow\")\n", - "wg.add_task(add, name=\"add1\")\n", - "wg.add_task(multiply, name=\"multiply1\", x=wg.tasks[\"add1\"].outputs[\"result\"])\n", - "# export the workgraph to html file so that it can be visualized in a browser\n", - "wg.to_html()\n", - "# visualize the workgraph in jupyter-notebook\n", - "# wg\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d680cb0a", - "metadata": {}, - "outputs": [], - "source": [ - "# Submit the workgraph\n", - "# -------------------------\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph.utils import generate_node_graph\n", - "from aiida.orm import Int\n", - "\n", - "#\n", - "# ------------------------- Submit the calculation -------------------\n", - "wg.submit(\n", - " inputs={\"add1\": {\"x\": Int(2), \"y\": Int(3)}, \"multiply1\": {\"y\": Int(4)}}, wait=True\n", - ")\n", - "# ------------------------- Print the output -------------------------\n", - "assert wg.tasks[\"multiply1\"].outputs[\"result\"].value == 20\n", - "print(\n", - " \"\\nResult of multiply1 is {} \\n\\n\".format(\n", - " wg.tasks[\"multiply1\"].outputs[\"result\"].value\n", - " )\n", - ")\n", - "# ------------------------- Generate node graph -------------------\n", - "generate_node_graph(wg.pk)\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "24704a56", - "metadata": {}, - "outputs": [], - "source": [ - "# CalcJob and WorkChain\n", - "# =======================\n", - "# AiiDA uses `CalcJob` to run a calculation on a remote computer. AiiDA community also provides a lot of well-written `calcfunction` and `WorkChain`. One can use these AiiDA component direclty in the WorkGraph. The inputs and outputs of the task is automatically generated based on the input and output port of the AiiDA component.\n", - "#\n", - "# Here is an example of using the `ArithmeticAddCalculation` Calcjob inside the workgraph. 
Suppose we want to calculate ```(x + y) + z ``` in two steps.\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph import WorkGraph\n", - "from aiida.calculations.arithmetic.add import ArithmeticAddCalculation\n", - "\n", - "#\n", - "wg = WorkGraph(\"test_calcjob\")\n", - "new = wg.add_task\n", - "new(ArithmeticAddCalculation, name=\"add1\")\n", - "wg.add_task(ArithmeticAddCalculation, name=\"add2\", x=wg.tasks[\"add1\"].outputs[\"sum\"])\n", - "wg.to_html()\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5475f286", - "metadata": {}, - "outputs": [], - "source": [ - "# Inspect the node\n", - "# ----------------\n", - "# How do I know which input and output to connect?\n", - "#\n", - "# The inputs and outputs of a task are generated automatically based on the inputs/outputs of the AiiDA component. WorkGraph also has some built-in ports, like `_wait` and `_outputs`. One can inpsect a task's inputs and outputs.\n", - "#\n", - "# Note: special case for `calcfunction`, the default name of its output is `result`.\n", - "#\n", - "\n", - "\n", - "# visualize the task\n", - "wg.tasks[\"add1\"].to_html()\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ad64a224", - "metadata": {}, - "outputs": [], - "source": [ - "# First Real-world Workflow: atomization energy of molecule\n", - "# ==========================================================\n", - "#\n", - "# The atomization energy, $\\Delta E$, of a molecule can be expressed as:\n", - "#\n", - "# .. math::\n", - "#\n", - "# \\Delta E = n_{\\text{atom}} \\times E_{\\text{atom}} - E_{\\text{molecule}}\n", - "#\n", - "# Where:\n", - "#\n", - "# - :math:`\\Delta E` is the atomization energy of the molecule.\n", - "# - :math:`n_{\\text{atom}}` is the number of atoms.\n", - "# - :math:`E_{\\text{atom}}` is the energy of an isolated atom.\n", - "# - :math:`E_{\\text{molecule}}` is the energy of the molecule.\n", - "#\n", - "# Define a workgraph\n", - "# -------------------\n", - "# aiida-quantumespresso provides `PwCalculation` CalcJob and `PwBaseWorkChain` to run a PW calculation. we can use it directly in the WorkGraph. 
Here we use the `PwCalculation` CalcJob.\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph import WorkGraph\n", - "from aiida.engine import calcfunction\n", - "from aiida_quantumespresso.calculations.pw import PwCalculation\n", - "\n", - "#\n", - "\n", - "\n", - "@calcfunction\n", - "def atomization_energy(output_atom, output_mol):\n", - " from aiida.orm import Float\n", - "\n", - " e = output_atom[\"energy\"] * output_mol[\"number_of_atoms\"] - output_mol[\"energy\"]\n", - " return Float(e)\n", - "\n", - "\n", - "#\n", - "wg = WorkGraph(\"atomization_energy\")\n", - "pw_atom = wg.add_task(PwCalculation, name=\"pw_atom\")\n", - "pw_mol = wg.add_task(PwCalculation, name=\"pw_mol\")\n", - "# create the task to calculate the atomization energy\n", - "wg.add_task(\n", - " atomization_energy,\n", - " name=\"atomization_energy\",\n", - " output_atom=pw_atom.outputs[\"output_parameters\"],\n", - " output_mol=pw_mol.outputs[\"output_parameters\"],\n", - ")\n", - "# export the workgraph to html file so that it can be visualized in a browser\n", - "wg.to_html()\n", - "# visualize the workgraph in jupyter-notebook\n", - "# wg\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "fbc973db", - "metadata": {}, - "outputs": [], - "source": [ - "# Prepare the inputs and submit the workflow\n", - "# -------------------------------------------\n", - "# You need to set up the code, computer, and pseudo potential for the calculation. Please refer to the this [documentation](https://aiida-quantumespresso.readthedocs.io/en/latest/installation/index.html) for more details.\n", - "#\n", - "# You can also stip this step.\n", - "#\n", - "\n", - "from aiida import load_profile\n", - "from aiida.common.exceptions import NotExistent\n", - "from aiida.orm import (\n", - " Dict,\n", - " KpointsData,\n", - " StructureData,\n", - " load_code,\n", - " load_group,\n", - " InstalledCode,\n", - " load_computer,\n", - ")\n", - "from ase.build import molecule\n", - "from ase import Atoms\n", - "\n", - "#\n", - "load_profile()\n", - "# create pw code\n", - "try:\n", - " pw_code = load_code(\n", - " \"qe-7.2-pw@localhost\"\n", - " ) # The computer label can also be omitted here\n", - "except NotExistent:\n", - " pw_code = InstalledCode(\n", - " computer=load_computer(\"localhost\"),\n", - " filepath_executable=\"pw.x\",\n", - " label=\"qe-7.2-pw\",\n", - " default_calc_job_plugin=\"quantumespresso.pw\",\n", - " ).store()\n", - "# create structure\n", - "n_atom = Atoms(\"N\")\n", - "n_atom.center(vacuum=1.5)\n", - "n_atom.pbc = True\n", - "structure_n = StructureData(ase=n_atom)\n", - "structure_n2 = StructureData(ase=molecule(\"N2\", vacuum=1.5, pbc=True))\n", - "# create the PW task\n", - "paras = Dict(\n", - " {\n", - " \"CONTROL\": {\n", - " \"calculation\": \"scf\",\n", - " },\n", - " \"SYSTEM\": {\n", - " \"ecutwfc\": 30,\n", - " \"ecutrho\": 240,\n", - " \"occupations\": \"smearing\",\n", - " \"smearing\": \"gaussian\",\n", - " \"degauss\": 0.1,\n", - " },\n", - " }\n", - ")\n", - "kpoints = KpointsData()\n", - "kpoints.set_kpoints_mesh([1, 1, 1])\n", - "# Load the pseudopotential family.\n", - "pseudo_family = load_group(\"SSSP/1.3/PBEsol/efficiency\")\n", - "pseudos = pseudo_family.get_pseudos(structure=structure_n2)\n", - "#\n", - "metadata = {\n", - " \"options\": {\n", - " \"resources\": {\n", - " \"num_machines\": 1,\n", - " \"num_mpiprocs_per_machine\": 1,\n", - " },\n", - " }\n", - "}\n", - "#\n", - "# ------------------------- Set the inputs -------------------------\n", - 
"wg.tasks[\"pw_atom\"].set(\n", - " {\n", - " \"code\": pw_code,\n", - " \"structure\": structure_n,\n", - " \"parameters\": paras,\n", - " \"kpoints\": kpoints,\n", - " \"pseudos\": pseudos,\n", - " \"metadata\": metadata,\n", - " }\n", - ")\n", - "wg.tasks[\"pw_mol\"].set(\n", - " {\n", - " \"code\": pw_code,\n", - " \"structure\": structure_n2,\n", - " \"parameters\": paras,\n", - " \"kpoints\": kpoints,\n", - " \"pseudos\": pseudos,\n", - " \"metadata\": metadata,\n", - " }\n", - ")\n", - "# ------------------------- Submit the calculation -------------------\n", - "wg.submit(wait=True, timeout=200)\n", - "# ------------------------- Print the output -------------------------\n", - "print(\n", - " \"Energy of a N atom: {:0.3f}\".format(\n", - " wg.tasks[\"pw_atom\"].outputs[\"output_parameters\"].value.get_dict()[\"energy\"]\n", - " )\n", - ")\n", - "print(\n", - " \"Energy of an un-relaxed N2 molecule: {:0.3f}\".format(\n", - " wg.tasks[\"pw_mol\"].outputs[\"output_parameters\"].value.get_dict()[\"energy\"]\n", - " )\n", - ")\n", - "print(\n", - " \"Atomization energy: {:0.3f} eV\".format(\n", - " wg.tasks[\"atomization_energy\"].outputs[\"result\"].value.value\n", - " )\n", - ")\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "6f7afec0", - "metadata": {}, - "outputs": [], - "source": [ - "# Generate node graph from the AiiDA process:\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph.utils import generate_node_graph\n", - "\n", - "generate_node_graph(wg.pk)\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c546090b", - "metadata": {}, - "outputs": [], - "source": [ - "# Advanced Topic: Dynamic Workgraph\n", - "# ==================================\n", - "#\n", - "# Graph builder\n", - "# --------------\n", - "# If we want to generate the workgraph on-the-fly, for example, if you want to use `if` to create the tasks, or repeat a calculation until it converges, you can use Graph Builder.\n", - "#\n", - "# Suppose we want to calculate:\n", - "#\n", - "# .. 
code-block:: python\n", - "#\n", - "# # step 1\n", - "# result = add(x, y)\n", - "# # step 2\n", - "# if result > 0:\n", - "# result = add(result, y)\n", - "# else:\n", - "# result = multiply(result, y)\n", - "# # step 3\n", - "# result = add(result, y)\n", - "#\n", - "\n", - "\n", - "# Create a WorkGraph which is dynamically generated based on the input\n", - "# then we output the result of from the context\n", - "from aiida_workgraph import task\n", - "\n", - "#\n", - "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"context.result\"}])\n", - "def add_multiply_if_generator(x, y):\n", - " wg = WorkGraph()\n", - " if x.value > 0:\n", - " add1 = wg.add_task(add, name=\"add1\", x=x, y=y)\n", - " # export the result of add1 to the context, so that context.result = add1.results\n", - " add1.set_context({\"result\": \"result\"})\n", - " else:\n", - " multiply1 = wg.add_task(multiply, name=\"multiply1\", x=x, y=y)\n", - " # export the result of multiply1 to the context\n", - " multiply1.set_context({\"result\": \"result\"})\n", - " return wg\n", - "\n", - "\n", - "#\n", - "wg = WorkGraph(\"if_task\")\n", - "wg.add_task(add, name=\"add1\")\n", - "wg.add_task(\n", - " add_multiply_if_generator,\n", - " name=\"add_multiply_if1\",\n", - " x=wg.tasks[\"add1\"].outputs[\"result\"],\n", - ")\n", - "wg.add_task(add, name=\"add2\", x=wg.tasks[\"add_multiply_if1\"].outputs[\"result\"])\n", - "wg.to_html()\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "52bf9f07", - "metadata": { - "lines_to_next_cell": 0 - }, - "outputs": [], - "source": [ - "# Submit the WorkGraph\n", - "\n", - "\n", - "wg.submit(\n", - " inputs={\n", - " \"add1\": {\"x\": 1, \"y\": 2},\n", - " \"add_multiply_if1\": {\"y\": 2},\n", - " \"add2\": {\"y\": 2},\n", - " },\n", - " wait=True,\n", - ")\n", - "# ------------------------- Print the output -------------------------\n", - "assert wg.tasks[\"add2\"].outputs[\"result\"].value == 7\n", - "print(\"\\nResult of add2 is {} \\n\\n\".format(wg.tasks[\"add2\"].outputs[\"result\"].value))\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5947d2e2", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Note: one can not see the detail of the `add_multiply_if1` before you running it.\n", - "#\n", - "# Second Real-world Workflow: Equation of state (EOS) WorkGraph\n", - "# =============================================================\n", - "#\n", - "# First, create the calcfunction for the job.\n", - "#\n", - "\n", - "from aiida import orm\n", - "from aiida_workgraph import task\n", - "\n", - "#\n", - "# explicitly define the output socket name to match the return value of the function\n", - "@task.calcfunction(outputs=[{\"name\": \"structures\"}])\n", - "def scale_structure(structure, scales):\n", - " \"\"\"Scale the structure by the given scales.\"\"\"\n", - " atoms = structure.get_ase()\n", - " structures = {}\n", - " for i in range(len(scales)):\n", - " atoms1 = atoms.copy()\n", - " atoms1.set_cell(atoms.cell * scales[i], scale_atoms=True)\n", - " structure = orm.StructureData(ase=atoms1)\n", - " structures[f\"s_{i}\"] = structure\n", - " return {\"structures\": structures}\n", - "\n", - "\n", - "#\n", - "# Output result from context to the output socket\n", - "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"context.result\"}])\n", - "def all_scf(structures, scf_inputs):\n", - " \"\"\"Run the scf calculation for each structure.\"\"\"\n", - " from aiida_workgraph 
import WorkGraph\n", - " from aiida_quantumespresso.calculations.pw import PwCalculation\n", - "\n", - " wg = WorkGraph()\n", - " for key, structure in structures.items():\n", - " pw1 = wg.add_task(PwCalculation, name=f\"pw1_{key}\", structure=structure)\n", - " pw1.set(scf_inputs)\n", - " # save the output parameters to the context\n", - " pw1.set_context({\"output_parameters\": f\"result.{key}\"})\n", - " return wg\n", - "\n", - "\n", - "#\n", - "\n", - "\n", - "@task.calcfunction()\n", - "# because this is a calcfunction, and the input datas are dynamic, we need use **datas.\n", - "def eos(**datas):\n", - " \"\"\"Fit the EOS of the data.\"\"\"\n", - " from ase.eos import EquationOfState\n", - "\n", - " #\n", - " volumes = []\n", - " energies = []\n", - " for _, data in datas.items():\n", - " volumes.append(data.dict.volume)\n", - " energies.append(data.dict.energy)\n", - " unit = data.dict.energy_units\n", - " #\n", - " eos = EquationOfState(volumes, energies)\n", - " v0, e0, B = eos.fit()\n", - " eos = orm.Dict({\"unit\": unit, \"v0\": v0, \"e0\": e0, \"B\": B})\n", - " return eos" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f26e396b", - "metadata": {}, - "outputs": [], - "source": [ - "# Define the WorkGraph\n", - "# ----------------------\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph import WorkGraph\n", - "\n", - "#\n", - "wg = WorkGraph(\"eos\")\n", - "scale_structure1 = wg.add_task(scale_structure, name=\"scale_structure1\")\n", - "all_scf1 = wg.add_task(\n", - " all_scf, name=\"all_scf1\", structures=scale_structure1.outputs[\"structures\"]\n", - ")\n", - "eos1 = wg.add_task(eos, name=\"eos1\", datas=all_scf1.outputs[\"result\"])\n", - "wg.to_html()\n", - "#" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7e0d9d7e", - "metadata": { - "lines_to_next_cell": 2 - }, - "outputs": [], - "source": [ - "# Combine with a relax task\n", - "# --------------------------\n", - "#\n", - "\n", - "\n", - "from aiida_workgraph import WorkGraph, task\n", - "from aiida_quantumespresso.calculations.pw import PwCalculation\n", - "\n", - "#\n", - "@task.graph_builder(outputs=[{\"name\": \"result\", \"from\": \"eos1.result\"}])\n", - "def eos_workgraph(structure=None, scales=None, scf_inputs=None):\n", - " wg = WorkGraph(\"eos\")\n", - " scale_structure1 = wg.add_task(\n", - " scale_structure, name=\"scale_structure1\", structure=structure, scales=scales\n", - " )\n", - " all_scf1 = wg.add_task(all_scf, name=\"all_scf1\", scf_inputs=scf_inputs)\n", - " eos1 = wg.add_task(eos, name=\"eos1\")\n", - " wg.add_link(scale_structure1.outputs[\"structures\"], all_scf1.inputs[\"structures\"])\n", - " wg.add_link(all_scf1.outputs[\"result\"], eos1.inputs[\"datas\"])\n", - " return wg\n", - "\n", - "\n", - "#\n", - "\n", - "# -------------------------------------------------------\n", - "wg = WorkGraph(\"relax_eos\")\n", - "relax_task = wg.add_task(PwCalculation, name=\"relax1\")\n", - "eos_wg_task = wg.add_task(\n", - " eos_workgraph, name=\"eos1\", structure=relax_task.outputs[\"output_structure\"]\n", - ")\n", - "wg.to_html()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "105f4dca", - "metadata": {}, - "outputs": [], - "source": [ - "# Useful tool: Web GUI\n", - "# =====================\n", - "# Open a terminal, and run:\n", - "#\n", - "# .. 
code-block:: console\n", - "#\n", - "# workgraph web start\n", - "#\n", - "# Then visit the page `http://127.0.0.1:8000/workgraph`, where you can view all the workgraphs.\n", - "#\n", - "# What's Next\n", - "# ===========\n", - "#\n", - "# +-----------------------------------------------------+-----------------------------------------------------------------------------+\n", - "# | `Concepts <../../concept/index.html>`_ | A brief introduction of WorkGraph’s main concepts. |\n", - "# +-----------------------------------------------------+-----------------------------------------------------------------------------+\n", - "# | `HowTo <../../howto/index.html>`_ | Advanced topics, e.g., flow control using `if`, `while`, and `context`. |\n", - "# +-----------------------------------------------------+-----------------------------------------------------------------------------+\n", - "#" - ] - } - ], - "metadata": { - "jupytext": { - "cell_metadata_filter": "-all", - "main_language": "python", - "notebook_metadata_filter": "-all" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/requirements.txt b/requirements.txt index 7fcb6988..bcf2a426 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,8 +4,9 @@ sphinx-book-theme~=1.1.3 sphinx-copybutton~=0.5.2 sphinx-panels~=0.4.1 sphinx-tabs~=3.4.5 +sphinx-gallery myst-nb~=1.1.1 # to run notebooks aiida-quantumespresso aiida-pseudo -aiida-workgraph +aiida-workgraph[widget] From 1ea66feaefa3d68a1b5ec95f84590fa30ad56769 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 09:42:11 +0200 Subject: [PATCH 09/39] debug sphinx linkcheck --- docs/conf.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 9f77b3bb..b1121c2e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -412,14 +412,22 @@ def setup(app): gallery_src_relative_dir = ( "gallery" # relative path of the gallery src wrt. 
sphinx src ) -sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"] -# we mimik the structure in the sphinx src directory in the gallery src directory +# debug +print("SPHINX_LINKCHECK", os.getenv("SPHINX_LINKCHECK")) + +# We don't want to build the gallery if sphinx linkcheck is only used +if os.getenv("SPHINX_LINKCHECK"): + sphinx_src_autogen_dirs = [] +else: + sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"] + -# path of python scripts that should be executed + +# we mimik the structure in the sphinx src directory in the gallery src directory gallery_src_dirs = [ os.path.join(gallery_src_relative_dir, autogen_dir) for autogen_dir in sphinx_src_autogen_dirs -] +] # path of the python scripts that should be executed sphinx_gallery_conf = { "filename_pattern": "/*", "examples_dirs": gallery_src_dirs, # in sphinx-gallery doc referred as gallery source From 4bdba12621a1291234b392da5caa671f8ffca74b Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 09:42:19 +0200 Subject: [PATCH 10/39] fix warnings --- .../autogen/GALLERY_HEADER.rst | 2 ++ .../writing_workflows_with_workgraph/autogen/qe.py | 10 +++++----- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst index ef04f7d6..71f830dd 100644 --- a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst +++ b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/GALLERY_HEADER.rst @@ -1,3 +1,5 @@ +:orphan: + ================================ Running workflows with WorkGraph ================================ diff --git a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py index aaab06bb..3cc2861f 100644 --- a/docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py +++ b/docs/gallery/sections/writing_workflows_with_workgraph/autogen/qe.py @@ -1,7 +1,7 @@ """ -================================ +=============================== Computational materials science -================================ +=============================== """ @@ -48,7 +48,7 @@ # First workflow: calculate the energy of N2 molecule # =================================================== # Define a workgraph -# ------------------- +# ------------------ # aiida-quantumespresso provides a CalcJob: `PwCalculation` to run a PW calculation. we can use it directly in the WorkGraph. The inputs and outputs of the task is automatically generated based on the `PwCalculation` CalcJob. # @@ -267,7 +267,7 @@ def atomization_energy(output_atom, output_mol): # %% # Use already existing workchain -# =============================== +# ============================== # Can we register a task from a workchain? Can we set the a input item of a namespace? Yes, we can! # # In the `PwRelaxWorkChain`, one can set the relax type (`calculation` key) in the input namespace `base.pw.parameters`. Now we create a new task to update the pw parameters. @@ -317,7 +317,7 @@ def pw_parameters(paras, relax_type): # %% # Use `protocol` to set input parameters (Experimental) -# ==================================================== +# ===================================================== # The aiida-quantumespresso package supports setting input parameters from protocol. For example, the PwRelaxWorkChain has a `get_builder_from_protocol` method. 
In this tutorial, we will show how to use the `protocol` to set the input parameters inside the WorkGraph. # From 2b2c43e5b31e5c0da622cf2f8986aa68ce86c039 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:13:14 +0200 Subject: [PATCH 11/39] need to build the sphinx doc because the index.rst references the autogen files; add conda action to install environment.yml --- .github/workflows/ci.yml | 15 ++++++--------- docs/conf.py | 14 +++----------- 2 files changed, 9 insertions(+), 20 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 132299b8..2f293792 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,18 +24,15 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 3.12 - uses: actions/setup-python@v2 + - uses: conda-incubator/setup-miniconda@v3 with: - python-version: 3.12 - - name: Install python dependencies - run: | - pip install --upgrade pip - pip install -r requirements.txt + miniconda-version: "24.5.0" + activate-environment: base + environment-file: environment.yml + python-version: "3.12" + auto-activate-base: true - name: "Build HTML docs" run: | - echo "SPHINX_LINKCHECK $SPHINX_LINKCHECK" make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going - SPHINX_LINKCHECK: on diff --git a/docs/conf.py b/docs/conf.py index b1121c2e..c788d691 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -69,7 +69,7 @@ ipython_mplbackend = "" copybutton_selector = "div:not(.no-copy)>div.highlight pre" -copybutton_prompt_text = ">>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: " +copybutton_prompt_text = ">>> |... |$ |In [d*]: | {2,5}...: | {5,8}: " copybutton_prompt_is_regexp = True todo_include_todos = True @@ -412,18 +412,10 @@ def setup(app): gallery_src_relative_dir = ( "gallery" # relative path of the gallery src wrt. sphinx src ) -# debug -print("SPHINX_LINKCHECK", os.getenv("SPHINX_LINKCHECK")) - -# We don't want to build the gallery if sphinx linkcheck is only used -if os.getenv("SPHINX_LINKCHECK"): - sphinx_src_autogen_dirs = [] -else: - sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"] - - # we mimik the structure in the sphinx src directory in the gallery src directory +sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"] + gallery_src_dirs = [ os.path.join(gallery_src_relative_dir, autogen_dir) for autogen_dir in sphinx_src_autogen_dirs From e52d86ca9677dfee12234ec6e4a8bedf662984a7 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:13:25 +0200 Subject: [PATCH 12/39] add nicer front page --- .../index.rst | 86 +++++++++++++++++++ 1 file changed, 86 insertions(+) diff --git a/docs/sections/writing_workflows_with_workgraph/index.rst b/docs/sections/writing_workflows_with_workgraph/index.rst index 3314cbbd..9c08162e 100644 --- a/docs/sections/writing_workflows_with_workgraph/index.rst +++ b/docs/sections/writing_workflows_with_workgraph/index.rst @@ -1,6 +1,92 @@ Writing workflows with WorkGraph ================================ +.. rst-class:: header-text + + Recently a new way to create workflows has been developed, the *workgraph*. + The workgraph should simplify the creation of the and provide a more user + friendly GUI that provides information about your workflow before + execution. In this section you will learn how to write different examples + with the workgraph. + +.. panels:: + :header: panel-header-text + :body: bg-light + :footer: bg-light border-0 + + ------ + :column: col-lg-12 + + .. 
link-button:: zero_to_hero + :type: ref + :text: Zero to hero + :classes: btn-light text-left stretched-link font-weight-bold + ^^^^^^^^^^^^ + + A short module on how to write the basic type of workflows in AiiDA: work functions. + The module also revises the usage of calculation functions to add simple Python functions to the provenance. + + +++++++++++++ + .. list-table:: + :widths: 50 50 + :class: footer-table + :header-rows: 0 + + * - |time| 30 min + - |aiida| :aiida-green:`Basic` + +.. panels:: + :header: panel-header-text + :body: bg-light + :footer: bg-light border-0 + + ------ + :column: col-lg-12 + + .. link-button:: qe + :type: ref + :text: Computational materials science + :classes: btn-light text-left stretched-link font-weight-bold + ^^^^^^^^^^^^ + + A step-by-step introduction to the basics of writing work chains in AiiDA. + After completing this module, you will be ready to start writing your own scientific workflows! + + +++++++++++++ + .. list-table:: + :widths: 50 50 + :class: footer-table + :header-rows: 0 + + * - |time| 60 min + - |aiida| :aiida-green:`Intermediate` + + +.. panels:: + :header: panel-header-text + :body: bg-light + :footer: bg-light border-0 + + ------ + :column: col-lg-12 + + .. link-button:: eos + :type: ref + :text: A Real-world example - Equation of state + :classes: btn-light text-left stretched-link font-weight-bold + ^^^^^^^^^^^^ + + A step-by-step introduction to the basics of writing work chains in AiiDA. + After completing this module, you will be ready to start writing your own scientific workflows! + + +++++++++++++ + .. list-table:: + :widths: 50 50 + :class: footer-table + :header-rows: 0 + + * - |time| 60 min + - |aiida| :aiida-green:`Intermediate` .. toctree:: :hidden: From 5b48209ba493061a13016025a3259dfba596b3af Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:14:16 +0200 Subject: [PATCH 13/39] add imports to conf.py --- docs/conf.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/conf.py b/docs/conf.py index c788d691..f5ba5f23 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -443,6 +443,8 @@ def setup(app): ) ) +import shutil +from pathlib import Path def copy_html_files(app, exception): """ Copy all .html files from source to build directory, maintaining the directory structure. 
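Patch 13 above only adds the `shutil` and `pathlib.Path` imports next to `copy_html_files`; the hook's body and its registration on the Sphinx `build-finished` event are spread over later patches in the series. For orientation, a minimal sketch of how the pieces fit together in `docs/conf.py` is shown below. The function body is not fully visible in these hunks, so the `rglob`/`copy` walk and the early return on a failed build are assumptions rather than the verbatim implementation; only the docstring, the two `print` messages and the `setup(app)` wiring are taken from the patches themselves.

```python
# Assumed consolidated sketch (not verbatim patch content) of the hook built up
# across these patches in docs/conf.py.
import shutil
from pathlib import Path


def copy_html_files(app, exception):
    """
    Copy all .html files from source to build directory, maintaining the directory structure.
    """
    if exception is not None:
        # assumption: skip copying if the Sphinx build itself failed
        return
    try:
        src_dir = Path(app.srcdir)
        build_dir = Path(app.outdir)
        # assumption: walk the source tree and mirror every .html file into the build dir
        for html_file in src_dir.rglob("*.html"):
            destination_file = build_dir / html_file.relative_to(src_dir)
            destination_file.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy(html_file, destination_file)
            print(f"Copy {html_file} to {destination_file}")
    except Exception as e:
        print(f"Failed to copy HTML files: {e}")


def setup(app):
    app.add_css_file("css/custom.css")
    app.connect("build-finished", copy_html_files)
```

Hooking into `build-finished` presumably ensures that standalone HTML files produced while the gallery scripts run (e.g. the `wg.to_html()` outputs) end up inside the final build directory.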
From 8e67630a8f909797cb2501100e6db6f4f4a22dbb Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:15:38 +0200 Subject: [PATCH 14/39] update conda version --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2f293792..0da8d744 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,7 +26,7 @@ jobs: - uses: actions/checkout@v2 - uses: conda-incubator/setup-miniconda@v3 with: - miniconda-version: "24.5.0" + miniconda-version: "Miniconda3-py312_24.5.0-0-Linux-x86_64.sh" activate-environment: base environment-file: environment.yml python-version: "3.12" From 4db06913f073647e497f0932ca64c032fcb392df Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:16:30 +0200 Subject: [PATCH 15/39] pre-commit run --- .github/workflows/ci.yml | 2 +- docs/conf.py | 11 +++++------ .../writing_workflows_with_workgraph/index.rst | 3 +-- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0da8d744..8ce31864 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -27,7 +27,7 @@ jobs: - uses: conda-incubator/setup-miniconda@v3 with: miniconda-version: "Miniconda3-py312_24.5.0-0-Linux-x86_64.sh" - activate-environment: base + activate-environment: base environment-file: environment.yml python-version: "3.12" auto-activate-base: true diff --git a/docs/conf.py b/docs/conf.py index f5ba5f23..238dadca 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -42,7 +42,7 @@ "sphinx_copybutton", "sphinx_panels", "sphinx_tabs.tabs", - "sphinx_gallery.gen_gallery" + "sphinx_gallery.gen_gallery", ] myst_enable_extensions = [ @@ -409,9 +409,7 @@ def setup(app): "plumpy": ("https://plumpy.readthedocs.io/en/latest/", None), } -gallery_src_relative_dir = ( - "gallery" # relative path of the gallery src wrt. sphinx src -) +gallery_src_relative_dir = "gallery" # relative path of the gallery src wrt. sphinx src # we mimik the structure in the sphinx src directory in the gallery src directory sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"] @@ -419,7 +417,7 @@ def setup(app): gallery_src_dirs = [ os.path.join(gallery_src_relative_dir, autogen_dir) for autogen_dir in sphinx_src_autogen_dirs -] # path of the python scripts that should be executed +] # path of the python scripts that should be executed sphinx_gallery_conf = { "filename_pattern": "/*", "examples_dirs": gallery_src_dirs, # in sphinx-gallery doc referred as gallery source @@ -445,6 +443,8 @@ def setup(app): import shutil from pathlib import Path + + def copy_html_files(app, exception): """ Copy all .html files from source to build directory, maintaining the directory structure. @@ -489,4 +489,3 @@ def copy_html_files(app, exception): print(f"Copy {html_file} to {destination_file}") except Exception as e: print(f"Failed to copy HTML files: {e}") - diff --git a/docs/sections/writing_workflows_with_workgraph/index.rst b/docs/sections/writing_workflows_with_workgraph/index.rst index 9c08162e..e6cdc925 100644 --- a/docs/sections/writing_workflows_with_workgraph/index.rst +++ b/docs/sections/writing_workflows_with_workgraph/index.rst @@ -19,7 +19,7 @@ Writing workflows with WorkGraph .. 
link-button:: zero_to_hero :type: ref - :text: Zero to hero + :text: Zero to hero :classes: btn-light text-left stretched-link font-weight-bold ^^^^^^^^^^^^ @@ -94,4 +94,3 @@ Writing workflows with WorkGraph autogen/zero_to_hero autogen/qe autogen/eos - From 1fa3aaf2aa9efab860a54ce3f601db30f12b216f Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:18:31 +0200 Subject: [PATCH 16/39] use latest --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8ce31864..3a950cc5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,7 +26,7 @@ jobs: - uses: actions/checkout@v2 - uses: conda-incubator/setup-miniconda@v3 with: - miniconda-version: "Miniconda3-py312_24.5.0-0-Linux-x86_64.sh" + miniconda-version: "latest" # Miniconda3-py312_24.5.0-0-Linux-x86_64 activate-environment: base environment-file: environment.yml python-version: "3.12" From 798304e6835079780447672faa0e61636253bc95 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:20:15 +0200 Subject: [PATCH 17/39] update CI run policy, it was run twice before --- .github/workflows/ci.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3a950cc5..45ac7228 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,11 @@ name: continuous-integration -on: [push, pull_request] +on: + push: + # only pushes to main trigger + branches: [main] + pull_request: + # always triggered jobs: From c87d18fcc8cb49659728b3c818d7db1aef007cb0 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:22:44 +0200 Subject: [PATCH 18/39] maybe auto update solves libmamba solver issue --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 45ac7228..ebe781ac 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,6 +35,7 @@ jobs: activate-environment: base environment-file: environment.yml python-version: "3.12" + auto-update-conda: true auto-activate-base: true - name: "Build HTML docs" run: | From adfd2c0580435a86e86232b9bdf58afd819a96e4 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:34:19 +0200 Subject: [PATCH 19/39] use minifore + mamba to see if libmambas solver is there --- .github/workflows/ci.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ebe781ac..5b8ad7af 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,12 +31,13 @@ jobs: - uses: actions/checkout@v2 - uses: conda-incubator/setup-miniconda@v3 with: - miniconda-version: "latest" # Miniconda3-py312_24.5.0-0-Linux-x86_64 - activate-environment: base + miniforge-version: latest + use-mamba: true + channels: conda-forge, defaults environment-file: environment.yml python-version: "3.12" - auto-update-conda: true auto-activate-base: true + activate-environment: "" - name: "Build HTML docs" run: | make -C docs html linkcheck From 86731fd6b538afa817751c8cb2ffcc7c2bef1dc4 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:34:26 +0200 Subject: [PATCH 20/39] fix path --- docs/sections/writing_workflows_with_workgraph/index.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/sections/writing_workflows_with_workgraph/index.rst 
b/docs/sections/writing_workflows_with_workgraph/index.rst index e6cdc925..ca37d3b8 100644 --- a/docs/sections/writing_workflows_with_workgraph/index.rst +++ b/docs/sections/writing_workflows_with_workgraph/index.rst @@ -17,7 +17,7 @@ Writing workflows with WorkGraph ------ :column: col-lg-12 - .. link-button:: zero_to_hero + .. link-button:: autogen/zero_to_hero :type: ref :text: Zero to hero :classes: btn-light text-left stretched-link font-weight-bold @@ -43,7 +43,7 @@ Writing workflows with WorkGraph ------ :column: col-lg-12 - .. link-button:: qe + .. link-button:: autogen/qe :type: ref :text: Computational materials science :classes: btn-light text-left stretched-link font-weight-bold @@ -70,7 +70,7 @@ Writing workflows with WorkGraph ------ :column: col-lg-12 - .. link-button:: eos + .. link-button:: autogen/eos :type: ref :text: A Real-world example - Equation of state :classes: btn-light text-left stretched-link font-weight-bold From 102d5e8342b3f5fd664c976d7560ce320a796041 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:41:18 +0200 Subject: [PATCH 21/39] add copy html files to setup --- .github/workflows/ci.yml | 2 +- docs/conf.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5b8ad7af..77e156f7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,7 +37,7 @@ jobs: environment-file: environment.yml python-version: "3.12" auto-activate-base: true - activate-environment: "" + activate-environment: true - name: "Build HTML docs" run: | make -C docs html linkcheck diff --git a/docs/conf.py b/docs/conf.py index 238dadca..68572168 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -489,3 +489,6 @@ def copy_html_files(app, exception): print(f"Copy {html_file} to {destination_file}") except Exception as e: print(f"Failed to copy HTML files: {e}") + +def setup(app): + app.connect("build-finished", copy_html_files) From 42ff5140847d94097537793ef5a7d552e43e9832 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:42:18 +0200 Subject: [PATCH 22/39] pre-commit run --- docs/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/conf.py b/docs/conf.py index 68572168..9cf433c3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -490,5 +490,6 @@ def copy_html_files(app, exception): except Exception as e: print(f"Failed to copy HTML files: {e}") + def setup(app): app.connect("build-finished", copy_html_files) From 43f6116d1f1862ead1758566e9df86b8e9b1f7ad Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:51:25 +0200 Subject: [PATCH 23/39] downgrade to python 3.11 due to some conflicts --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 77e156f7..4db782e6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,10 +16,10 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Set up Python 3.12 + - name: Set up Python 3.11 uses: actions/setup-python@v2 with: - python-version: 3.12 + python-version: 3.11 - uses: pre-commit/action@v2.0.0 build: @@ -35,7 +35,7 @@ jobs: use-mamba: true channels: conda-forge, defaults environment-file: environment.yml - python-version: "3.12" + python-version: "3.11" auto-activate-base: true activate-environment: true - name: "Build HTML docs" From 12e28724b33b6e36bafed215c1bd0f49387efd17 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 10:54:24 
+0200 Subject: [PATCH 24/39] update README --- README.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 60d0bb65..a908323b 100644 --- a/README.md +++ b/README.md @@ -26,17 +26,19 @@ If you have a question, feel free to just [open an issue](https://github.com/aii ### Prerequisites -* python 3.12 or greater +* python 3.11 ### Build instructions ```bash git clone https://github.com/aiidateam/aiida-tutorials.git cd aiida-tutorials -pip install -r requirements.txt +conda env create --quiet --name aiida-tutorials --file environment.yml +conda activate aiida-tutorials pre-commit install # enable pre-commit hooks (optional) -cd docs/ -make +make -C docs html # to build docs +make -C docs html linkcheck # to run link checks + # open build/html/index.html ``` From 3a2c005127f49632d6ec6d5cadba3a462476c8c6 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 11:02:07 +0200 Subject: [PATCH 25/39] debug ci --- .github/workflows/ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4db782e6..e9a2a114 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -40,6 +40,8 @@ jobs: activate-environment: true - name: "Build HTML docs" run: | + ls + tree make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going From 8049b8c57af1330bd02aaf217cf26724be1425b2 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 11:04:21 +0200 Subject: [PATCH 26/39] pre-commit run --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a908323b..063badb6 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ If you have a question, feel free to just [open an issue](https://github.com/aii ```bash git clone https://github.com/aiidateam/aiida-tutorials.git cd aiida-tutorials -conda env create --quiet --name aiida-tutorials --file environment.yml +conda env create --quiet --name aiida-tutorials --file environment.yml conda activate aiida-tutorials pre-commit install # enable pre-commit hooks (optional) make -C docs html # to build docs From ef349ac6a3feacabb56a1980f561099e6e1b15c0 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 11:06:41 +0200 Subject: [PATCH 27/39] debug ci --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e9a2a114..151417cb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,7 +37,7 @@ jobs: environment-file: environment.yml python-version: "3.11" auto-activate-base: true - activate-environment: true + activate-environment: "" - name: "Build HTML docs" run: | ls From 0a06c78045cd89a410e1018e441b8d5bc8eaf480 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 11:35:35 +0200 Subject: [PATCH 28/39] debug ci --- .github/workflows/ci.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 151417cb..94eda25c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,11 +37,15 @@ jobs: environment-file: environment.yml python-version: "3.11" auto-activate-base: true - activate-environment: "" + activate-environment: base - name: "Build HTML docs" run: | ls tree + echo $CONDA_DEFAULT_ENV + echo $CONDA_PREFIX + conda info + conda list make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going From 
15fb57ada8d8b090ca823ba5bdca5742b6e8c917 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 11:44:55 +0200 Subject: [PATCH 29/39] debug ci --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 94eda25c..8d698ac2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,7 +37,7 @@ jobs: environment-file: environment.yml python-version: "3.11" auto-activate-base: true - activate-environment: base + activate-environment: true - name: "Build HTML docs" run: | ls From a4cac3a98167dab73c3e4498a94da7f2df90d0b4 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 11:50:18 +0200 Subject: [PATCH 30/39] debug ci --- .github/workflows/ci.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8d698ac2..7adbcf54 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,6 +46,8 @@ jobs: echo $CONDA_PREFIX conda info conda list + conde env list + conda activate base make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going From f89a812029153a9ef5561fa463ddf4569650c61a Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 11:57:20 +0200 Subject: [PATCH 31/39] debug ci --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7adbcf54..c9be60e3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -46,7 +46,7 @@ jobs: echo $CONDA_PREFIX conda info conda list - conde env list + conda env list conda activate base make -C docs html linkcheck env: From 8eb5d0f2224143f975913eabd95461f8eb061dd1 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 12:08:05 +0200 Subject: [PATCH 32/39] debug ci --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c9be60e3..2c7516d8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -37,17 +37,17 @@ jobs: environment-file: environment.yml python-version: "3.11" auto-activate-base: true - activate-environment: true + activate-environment: aiida-tutorials - name: "Build HTML docs" run: | ls - tree echo $CONDA_DEFAULT_ENV echo $CONDA_PREFIX conda info conda list conda env list - conda activate base + conda init + conda activate aiida-tutorials make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going From 6118d30f3ae6765f952a05fce5d9397d4411f458 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 12:19:13 +0200 Subject: [PATCH 33/39] debug ci --- .github/workflows/ci.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2c7516d8..6a2858d1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,10 +43,9 @@ jobs: ls echo $CONDA_DEFAULT_ENV echo $CONDA_PREFIX - conda info - conda list conda env list conda init + source /home/runner/.bashrc conda activate aiida-tutorials make -C docs html linkcheck env: From e38a3724cae64de2208fa3a2c97c8dc08e953ef6 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 12:29:13 +0200 Subject: [PATCH 34/39] debug ci --- .github/workflows/ci.yml | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6a2858d1..32edf738 
100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,17 +36,21 @@ jobs: channels: conda-forge, defaults environment-file: environment.yml python-version: "3.11" - auto-activate-base: true - activate-environment: aiida-tutorials + activate-environment: aiida-tutorials # updating base causes conflicts so we create a new env - name: "Build HTML docs" run: | - ls - echo $CONDA_DEFAULT_ENV - echo $CONDA_PREFIX - conda env list conda init source /home/runner/.bashrc conda activate aiida-tutorials + rabbitmq-server -detached + sleep 10 + rabbitmq-diagnostics status + verdi presto + verdi daemon start + verdi status + aiida-pseudo install sssp -x PBEsol + verdi group list + cat /proc/cpuinfo | grep processor | wc -l make -C docs html linkcheck env: SPHINXOPTS: -nW --keep-going From 32bde7f9e346ef1f093e4858645554359ca94d26 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 12:34:51 +0200 Subject: [PATCH 35/39] move linkcheck to RTD --- .github/workflows/ci.yml | 33 --------------------------------- .readthedocs.yml | 2 ++ 2 files changed, 2 insertions(+), 33 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 32edf738..39acc0b7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,36 +21,3 @@ jobs: with: python-version: 3.11 - uses: pre-commit/action@v2.0.0 - - build: - - runs-on: ubuntu-latest - timeout-minutes: 30 - - steps: - - uses: actions/checkout@v2 - - uses: conda-incubator/setup-miniconda@v3 - with: - miniforge-version: latest - use-mamba: true - channels: conda-forge, defaults - environment-file: environment.yml - python-version: "3.11" - activate-environment: aiida-tutorials # updating base causes conflicts so we create a new env - - name: "Build HTML docs" - run: | - conda init - source /home/runner/.bashrc - conda activate aiida-tutorials - rabbitmq-server -detached - sleep 10 - rabbitmq-diagnostics status - verdi presto - verdi daemon start - verdi status - aiida-pseudo install sssp -x PBEsol - verdi group list - cat /proc/cpuinfo | grep processor | wc -l - make -C docs html linkcheck - env: - SPHINXOPTS: -nW --keep-going diff --git a/.readthedocs.yml b/.readthedocs.yml index e650775d..69344444 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,6 +20,8 @@ build: - aiida-pseudo install sssp -x PBEsol - verdi group list - cat /proc/cpuinfo | grep processor | wc -l + - python -m sphinx -T -W --keep-going -b linkcheck -d _build/doctrees -D language=en . $READTHEDOCS_OUTPUT/html + # Build documentation in the docs/ directory with Sphinx sphinx: From 7620580a6f6ba55dec06967ee4749aa5a59ad47a Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 13:18:58 +0200 Subject: [PATCH 36/39] debug rtd --- .readthedocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 69344444..6531613e 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ build: - aiida-pseudo install sssp -x PBEsol - verdi group list - cat /proc/cpuinfo | grep processor | wc -l - - python -m sphinx -T -W --keep-going -b linkcheck -d _build/doctrees -D language=en . 
$READTHEDOCS_OUTPUT/html + - python -m sphinx -T -W --keep-going -b linkcheck -d _build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html # Build documentation in the docs/ directory with Sphinx From e531860b493445a1fe02b57a3fecce12a0261e2b Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 13:31:11 +0200 Subject: [PATCH 37/39] add html to link ignore, also add_css_file --- .readthedocs.yml | 2 +- docs/conf.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 6531613e..d9fa18a9 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -20,7 +20,7 @@ build: - aiida-pseudo install sssp -x PBEsol - verdi group list - cat /proc/cpuinfo | grep processor | wc -l - - python -m sphinx -T -W --keep-going -b linkcheck -d _build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html + - python -m sphinx -T --keep-going -b linkcheck -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html # Build documentation in the docs/ directory with Sphinx diff --git a/docs/conf.py b/docs/conf.py index 9cf433c3..5395bc27 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -392,13 +392,12 @@ suppress_warnings = ["misc.highlighting_failure"] # Links we ignore, because they do not work temporary and we cannot fix it -linkcheck_ignore = ["https://www.big-map.eu/"] - - -def setup(app): - """Setup function called by sphinx.""" - app.add_css_file("css/custom.css") - +linkcheck_ignore = [ + "https://www.big-map.eu/", + "*concept/index.html", + "*howto/index.html", + "http://127.0.0.1:8000/workgraph", +] # we don't want to run the notebook nb_execution_mode = "off" @@ -492,4 +491,5 @@ def copy_html_files(app, exception): def setup(app): + app.add_css_file("css/custom.css") app.connect("build-finished", copy_html_files) From 55210f278b204f975a614768572798c563a63f3f Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 14:08:21 +0200 Subject: [PATCH 38/39] add regex correctly --- docs/conf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 5395bc27..78134b47 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -394,8 +394,8 @@ # Links we ignore, because they do not work temporary and we cannot fix it linkcheck_ignore = [ "https://www.big-map.eu/", - "*concept/index.html", - "*howto/index.html", + r".*concept/index.html", + r".*howto/index.html", "http://127.0.0.1:8000/workgraph", ] From 61ce7e60ab6709a82d394f437e41d54f3c953048 Mon Sep 17 00:00:00 2001 From: Alexander Goscinski Date: Thu, 12 Sep 2024 14:38:52 +0200 Subject: [PATCH 39/39] Apply suggestions from code review --- .readthedocs.yml | 3 --- README.md | 2 +- docs/conf.py | 2 +- requirements.txt | 2 +- 4 files changed, 3 insertions(+), 6 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index d9fa18a9..d7c9d85f 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -37,6 +37,3 @@ conda: environment: environment.yml # Optionally set the version of Python and requirements required to build your docs -#python: -# install: -# - requirements: requirements.txt diff --git a/README.md b/README.md index 063badb6..6e1da2fc 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,7 @@ conda env create --quiet --name aiida-tutorials --file environment.yml conda activate aiida-tutorials pre-commit install # enable pre-commit hooks (optional) make -C docs html # to build docs -make -C docs html linkcheck # to run link checks +make -C docs linkcheck # to run link checks (only for dev) # open 
build/html/index.html
 ```

diff --git a/docs/conf.py b/docs/conf.py
index 78134b47..a4f30e55 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -399,7 +399,7 @@
     "http://127.0.0.1:8000/workgraph",
 ]

-# we don't want to run the notebook
+# We are not installing a full aiida environment
 nb_execution_mode = "off"

 # Intersphinx configuration
diff --git a/requirements.txt b/requirements.txt
index bcf2a426..bd6cd99e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ sphinx-book-theme~=1.1.3
 sphinx-copybutton~=0.5.2
 sphinx-panels~=0.4.1
 sphinx-tabs~=3.4.5
-sphinx-gallery
+sphinx-gallery~=0.17.1
 myst-nb~=1.1.1
 # to run notebooks
 aiida-quantumespresso
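The gallery wiring and the link-check settings in `docs/conf.py` are assembled across many small fix-up patches above. Purely as a recap, the relevant configuration converges on roughly the following sketch; the `gallery_dirs` key is an assumption, since that part of `sphinx_gallery_conf` is truncated in the hunks, while the other values are taken from the patches themselves.

```python
# Recap sketch of the gallery/link-check settings in docs/conf.py at the end of
# the series; "gallery_dirs" is assumed, the rest mirrors the hunks above.
import os

gallery_src_relative_dir = "gallery"  # relative path of the gallery src wrt. sphinx src

# mimic the structure of the sphinx src directory inside the gallery src directory
sphinx_src_autogen_dirs = ["sections/writing_workflows_with_workgraph/autogen"]
gallery_src_dirs = [
    os.path.join(gallery_src_relative_dir, autogen_dir)
    for autogen_dir in sphinx_src_autogen_dirs
]  # paths of the python scripts that should be executed

sphinx_gallery_conf = {
    "filename_pattern": "/*",
    "examples_dirs": gallery_src_dirs,        # gallery sources: the executable .py scripts
    "gallery_dirs": sphinx_src_autogen_dirs,  # assumed: where the generated pages are written
}

# The notebooks themselves are no longer executed by myst-nb; the gallery
# scripts are executed by sphinx-gallery instead.
nb_execution_mode = "off"

# Entries are regular expressions, hence the raw strings introduced in patch 38.
linkcheck_ignore = [
    "https://www.big-map.eu/",
    r".*concept/index.html",
    r".*howto/index.html",
    "http://127.0.0.1:8000/workgraph",
]
```

The raw strings in `linkcheck_ignore` matter because Sphinx treats each entry as a regular expression, which is exactly the issue the "add regex correctly" patch addresses.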