From 14b6a9725fff20740a2a09d2c6ed18f21be06624 Mon Sep 17 00:00:00 2001
From: Bruce Perry <53018946+baperry2@users.noreply.github.com>
Date: Fri, 8 Dec 2023 13:47:15 -0700
Subject: [PATCH] Add amrex and sundials as submodules of PelePhysics (#451)

* add amrex and sundials as submodules
* default paths for amrex and sundials to submodules
* update ci to use submodules
* add docs for building with submods
* dependabot for amrex updates
---
 .github/dependabot.yml         |   9 +++
 .github/workflows/ci.yml       |  15 +----
 .gitignore                     |   1 -
 .gitmodules                    |   6 ++
 Docs/sphinx/GettingStarted.rst | 100 +++++++++++++++++++--------------
 Submodules/amrex               |   1 +
 Submodules/sundials            |   1 +
 Testing/Exec/Make.PelePhysics  |   4 +-
 8 files changed, 80 insertions(+), 57 deletions(-)
 create mode 100644 .github/dependabot.yml
 create mode 100644 .gitmodules
 create mode 160000 Submodules/amrex
 create mode 160000 Submodules/sundials

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 000000000..a1bfa4c9b
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,9 @@
+version: 2
+
+updates:
+  - package-ecosystem: gitsubmodule
+    schedule:
+      interval: "monthly"
+    directory: /
+    allow:
+      - dependency-name: "Submodules/amrex"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 05a988b47..d159dc0cb 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,6 +1,7 @@
name: PelePhysics-CI

on:
+  workflow_dispatch:
  push:
    branches: [development]
  pull_request:
@@ -115,21 +116,9 @@ jobs:
        uses: actions/checkout@v3
        with:
          path: PelePhysics-${{matrix.comp}}
-      - name: Clone SUNDIALS
-        uses: actions/checkout@v3
-        with:
-          repository: LLNL/sundials
-          ref: 'v6.6.1'
-          path: sundials-${{matrix.comp}}
-      - name: Clone AMReX
-        uses: actions/checkout@v3
-        with:
-          repository: AMReX-Codes/amrex
-          path: AMReX-${{matrix.comp}}
+          submodules: recursive
      - name: Set Environment Variables
        run: |
-          echo "AMREX_HOME=${{github.workspace}}/AMReX-${{matrix.comp}}" >> $GITHUB_ENV
-          echo "SUNDIALS_HOME=${{github.workspace}}/sundials-${{matrix.comp}}" >> $GITHUB_ENV
          echo "PELE_PHYSICS_HOME=${{github.workspace}}/PelePhysics-${{matrix.comp}}" >> $GITHUB_ENV
          echo "TRANSPORT_WORKING_DIRECTORY=${{github.workspace}}/PelePhysics-${{matrix.comp}}/Testing/Exec/TranEval" >> $GITHUB_ENV
          echo "EOS_WORKING_DIRECTORY=${{github.workspace}}/PelePhysics-${{matrix.comp}}/Testing/Exec/EosEval" >> $GITHUB_ENV
diff --git a/.gitignore b/.gitignore
index 824a57af9..498065751 100644
--- a/.gitignore
+++ b/.gitignore
@@ -64,6 +64,5 @@ SAVE/
SAVE_file*/
NO_GIT
SuiteSparse
-sundials
ThirdParty/INSTALL
ThirdParty/BUILD
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..5f7c972b3
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,6 @@
+[submodule "Submodules/sundials"]
+  path = Submodules/sundials
+  url = https://github.com/LLNL/sundials.git
+[submodule "Submodules/amrex"]
+  path = Submodules/amrex
+  url = https://github.com/AMReX-Codes/amrex.git
diff --git a/Docs/sphinx/GettingStarted.rst b/Docs/sphinx/GettingStarted.rst
index 2a9ed9124..59d406466 100644
--- a/Docs/sphinx/GettingStarted.rst
+++ b/Docs/sphinx/GettingStarted.rst
@@ -4,77 +4,95 @@
`PelePhysics` Quickstart
************************

-Greetings impatient user. Once again, note that this documentation focuses on the CHEMISTRY part of `PelePhysics`.
+Greetings impatient user. As a word of caution, this documentation is in progress. Some parts of the code remain undocumented,
+and some parts of the documentation are out of date.
+If you are confused by something you read here, or otherwise
+need help with `PelePhysics`, the best course of action is to open a Discussion on the `GitHub page <https://github.com/AMReX-Combustion/PelePhysics>`_,
+so the development team and other users can help.

-- If you are familiar with `PelePhysics`, have it installed already and would simply like to know which chemistry-related keywords and/or environment variables to set in your various input files to perform a simulation with one of the codes available in the `PeleSuite`, then I invite you to directly skip to section :ref:`sec:subsPPOptions`.
- If you are a complete beginner, I urge you to carefully read the two following chapters :ref:`sec:GetPP` and :ref:`sec:GetCVODE`, to properly set-up your working environment.
+- If you are familiar with `PelePhysics`, have it installed already and would simply like to know which chemistry-related keywords and/or environment variables to set in your various input files to perform a simulation with one of the codes available in the `PeleSuite`, then I invite you to directly skip to section :ref:`sec:subsPPOptions`.

- If you are in a hurry but still would like more context, visit section :ref:`sec:subsWD` to be referred to portions of this document that are of interest to you.

-
.. _sec:GetPP:

Obtaining `PelePhysics`
=======================
+PelePhysics is primarily intended as a library for use in the other `Pele codes <https://github.com/AMReX-Combustion>`_, and is automatically downloaded as
+a submodule of both PeleC and PeleLMeX. However, it can also be used as a stand-alone solver for chemical reactions and thermodynamic properties,
+or as a library for other codes. Instructions for how to obtain `PelePhysics` for these purposes are provided here.

First, make sure that "Git" is installed on your machine---we recommend version 1.7.x or higher. Then...

-1. Download the `AMReX` repository by typing: ::
+1. Clone the `PelePhysics` repository and its submodules: ::

-    git clone https://github.com/AMReX-Codes/amrex.git
+    git clone --recursive https://github.com/AMReX-Combustion/PelePhysics.git

-This will create an ``amrex/`` folder on your machine. Next, set the environment variable ``AMREX_HOME`` to point to the location where you have downloaded `AMReX`::
+   This will create a ``PelePhysics`` directory on your machine. The ``--recursive`` option ensures that the required :ref:`sec:GetCVODE` are also downloaded to the
+   ``PelePhysics/Submodules`` directory. Set the environment variable ``PELE_PHYSICS_HOME`` to point to the location of this folder (``export PELE_PHYSICS_HOME=$(pwd)/PelePhysics``).

-    export AMREX_HOME=/path/to/amrex/
-
-2. Clone the `Pele` repository: ::
+2. Periodically update the repository and its dependencies by typing ``git pull && git submodule update`` within the repository.

-    git clone git@github.com:AMReX-Combustion/PelePhysics.git

-This will create a ``PelePhysics`` folder on your machine. Set the environment variable ``PELE_PHYSICS_HOME`` to point to the location of this folder.
+.. _sec:GetCVODE:

-3. Periodically update both of these repositories by typing ``git pull`` within each repository.
+Dependencies
+============
+PelePhysics has two required dependencies: `AMReX <https://github.com/AMReX-Codes/amrex>`_ and `SUNDIALS <https://github.com/LLNL/sundials>`_.
+These dependencies are shipped with PelePhysics as git submodules and the proper versions are cloned to the ``PelePhysics/Submodules/`` directory automatically when
+doing the recursive git clone described in :ref:`sec:GetPP`.
+Users also have the option to download their own versions of these dependencies elsewhere on their machine, in which case
+the paths to these libraries must be specified by exporting the ``AMREX_HOME`` and ``SUNDIALS_HOME`` environment variables or defining these variables in
+the ``GNUmakefile`` when building.

-.. _sec:GetCVODE:
+1. `AMReX` is a library that provides data structures and methods for operating on data in the context of
+   block-structured adaptive mesh refinement that are used throughout the Pele suite of codes.

-Install CVODE and `SuiteSparse`
-===============================
+2. `SUNDIALS` is a library of differential and algebraic equation solvers that is used by PelePhysics
+   primarily for its CVODE package, which performs implicit integration of the stiff ODE systems that arise from chemical reactions.
+
+PelePhysics has two optional dependencies: `SuiteSparse <https://github.com/DrTimothyAldenDavis/SuiteSparse>`_ and `MAGMA <https://icl.utk.edu/magma/>`_.
+These dependencies are not shipped with PelePhysics by default, but the proper versions are automatically downloaded if needed during the building
+process (:ref:`sec:BuildingRunning`). Both of these dependencies are libraries that aid in solving the linear systems that arise during implicit integration of a chemical system using CVODE.
+There are several other solver options available within CVODE, so these libraries are not strictly required, but they may enable options that
+lead to better performance on certain computing architectures.

-***The user is in charge of installing the proper CVODE version, as well as installing and properly linking the KLU library if sparsity features are needed.**
+1. `SuiteSparse` is a suite of Sparse Matrix software that is very handy when dealing with big kinetic mechanisms (the Jacobian of which are usually very sparse).
+   In such a case, CVODE can make use of the KLU library, which is part of `SuiteSparse`, to perform sparse linear algebra.
+   Documentation and further information can be found on the `SuiteSparse website <https://github.com/DrTimothyAldenDavis/SuiteSparse>`_.
+
+2. `MAGMA` is a collection of linear algebra libraries for heterogeneous computing.
+
+
+.. _sec:BuildingRunning:
+
+Building and Running Test Cases
+===============================
+PelePhysics has several short programs that are used to test its various capabilities, located in the ``Testing/Exec`` directory. For example,
+we will consider the `ReactEval` case, which tests the chemical reaction integration capability. ::

-SuiteSparse
------------
+    cd ${PELE_PHYSICS_HOME}/Testing/Exec/ReactEval

-`SuiteSparse` is a suite of Sparse Matrix software that is very handy when dealing with big kinetic mechanisms (the Jacobian of which are usually very sparse).
-In such a case, CVODE can make use of the KLU library, which is part of `SuiteSparse`, to perform sparse linear algebra.
-Documentation and further information can be found on `SuiteSparse website `_.
+The ``GNUmakefile`` in this directory specifies several key build options, like the compiler (``COMP``) and whether it will be a debug (``DEBUG``) build.

-At the time this note is written, the recommended **SuiteSparse version** is **5.4.0**. Follow these steps to set-up your working environment and build the required libraries:
+First, build the necessary dependencies. `SUNDIALS` is always built.
+`SuiteSparse` is downloaded and built if ``PELE_USE_KLU = TRUE`` is specified in the ``GNUmakefile``.
+`MAGMA` is downloaded and built if ``PELE_USE_MAGMA = TRUE`` is specified in the ``GNUmakefile``.
+All dependencies are installed in the ``${PELE_PHYSICS_HOME}/ThirdParty`` directory. For a given set of
+compile-time options, this step only needs to be done once, but it needs to be redone whenever compile-time
+options are changed ::

-1. Go to `the SuiteSparse website `_ and download the compressed file for the recommended version
-2. Copy the tar file into ``$PELE_PHYSICS_HOME/ThirdParty``
-3. Untar ('tar -zxvf'), cd into it and type 'make' into the following folders: ``SuiteSparse_config``, ``AMD``, ``COLAMD``, ``BTF``
-4. Go into ``metis-5.1.0`` and type 'make config shared=1' followed by 'make'
-5. Go into ``KLU`` and type 'make'
-6. Check that all dynamic libraries have correctly been generated and copied into the folder ``$PELE_PHYSICS_HOME/ThirdParty/SuiteSparse/lib``
-7. It is recommended that you add the path ``$PELE_PHYSICS_HOME/ThirdParty/SuiteSparse/lib`` to your ``LD_LIBRARY_PATH``, for precaution
-8. Note that depending upon your compiler, the static ``.a`` versions of the libraries might also be required. In such a case, you can copy them directly from each program folder into the ``SuiteSparse/lib`` folder
+    make TPL

-CVODE
------
+Now, build the `ReactEval` executable (the ``-j 4`` option runs the compilation in parallel on 4 processes): ::

-CVODE is a solver for stiff and nonstiff ordinary differential equation (ODE) systems. Documentation and further information can be found `online `_.
-At the time this note is written, the recommended **CVODE version** is **v5.0.0**.
+    make -j 4

-The CVODE sources are distributed as compressed archives, with names following the convention ``cvode-x.y.z.tar.gz``. They can be downloaded by following
-`this link `_. However, we have designed a simple script enabling to install the current version the correct way. Simply:
+To run the program, execute: ::

-1. Go into ``$PELE_PHYSICS_HOME/ThirdParty``
-2. Execute either ``get_sundials_v5dev1.sh`` or ``get_sundials_v5dev1_CUDA.sh`` depending on your application (GPU or not) and machine
-3. Set the ``SUNDIALS_LIB_DIR`` environment variable to point to the location where all CVODE libraries have been generated. If you followed these guidelines, it should be ``$PELE_PHYSICS_HOME/ThirdParty/sundials/instdir/lib/``
-4. It is recommended, here also, that you add the path ``$PELE_PHYSICS_HOME/ThirdParty/sundials/instdir/lib`` to your ``LD_LIBRARY_PATH``, as paths can get lost in the build of external libraries
+    ./Pele3d.gnu.ex inputs.3d-regt_GPU

-Note that if you do not want to use the KLU library, you can also disable the flags (``-DKLU_ENABLE``) in the ``*.sh`` scripts.
+If you need to clean your build, you can run ::

+    make TPLrealclean && make realclean
diff --git a/Submodules/amrex b/Submodules/amrex
new file mode 160000
index 000000000..edb4c2502
--- /dev/null
+++ b/Submodules/amrex
@@ -0,0 +1 @@
+Subproject commit edb4c25027efbbc465c88d453441dcd7115d8651
diff --git a/Submodules/sundials b/Submodules/sundials
new file mode 160000
index 000000000..cf16ebf14
--- /dev/null
+++ b/Submodules/sundials
@@ -0,0 +1 @@
+Subproject commit cf16ebf14318c7e60cbf94728a8c5c8415acf610
diff --git a/Testing/Exec/Make.PelePhysics b/Testing/Exec/Make.PelePhysics
index 8d6a1be0d..85ed2ae5d 100644
--- a/Testing/Exec/Make.PelePhysics
+++ b/Testing/Exec/Make.PelePhysics
@@ -1,7 +1,7 @@
PELE_HOME ?= ../../../..
-AMREX_HOME ?= $(abspath $(lastword $(PELE_HOME)/Submodules/amrex)) PELE_PHYSICS_HOME ?= $(abspath $(lastword $(PELE_HOME)/Submodules/PelePhysics)) -SUNDIALS_HOME ?= $(abspath $(lastword $(PELE_HOME)/Submodules/sundials)) +AMREX_HOME ?= $(abspath $(lastword $(PELE_PHYSICS_HOME)/Submodules/amrex)) +SUNDIALS_HOME ?= $(abspath $(lastword $(PELE_PHYSICS_HOME)/Submodules/sundials)) EOS_HOME ?= $(PELE_PHYSICS_HOME)/Eos REACTIONS_HOME ?= $(PELE_PHYSICS_HOME)/Reactions
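
Usage note for existing checkouts: with this patch, AMReX and SUNDIALS are tracked as git submodules under ``Submodules/``, so a plain ``git pull`` no longer brings in the dependencies by itself. The commands below are a minimal sketch using standard git, assuming the default layout described in the updated GettingStarted.rst:

    # update an existing clone and fetch the new submodule pointers
    git pull
    git submodule update --init --recursive

    # a fresh clone can grab everything in one step instead
    git clone --recursive https://github.com/AMReX-Combustion/PelePhysics.git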
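Because ``Testing/Exec/Make.PelePhysics`` only assigns ``AMREX_HOME`` and ``SUNDIALS_HOME`` with ``?=``, externally installed copies of the dependencies can still be used by exporting those variables before building, as the new Dependencies section describes. A hedged example, where the ``/path/to`` locations are placeholders rather than paths shipped with PelePhysics:

    # override the submodule defaults with external installations
    export AMREX_HOME=/path/to/amrex
    export SUNDIALS_HOME=/path/to/sundials
    cd ${PELE_PHYSICS_HOME}/Testing/Exec/ReactEval
    make TPL && make -j 4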