diff --git a/.github/workflows/macos-ci-aarch64.yaml b/.github/workflows/macos-ci-aarch64.yaml
index e905915f2..cb4c6c219 100644
--- a/.github/workflows/macos-ci-aarch64.yaml
+++ b/.github/workflows/macos-ci-aarch64.yaml
@@ -35,7 +35,7 @@ jobs:
mkdir -p /Users/ec2-user/spack-stack/build-cache
mkdir -p /Users/ec2-user/spack-stack/source-cache
- - name: create-env
+ - name: create-buildcache
run: |
# Get day of week to decide whether to use build caches or not
DOW=$(date +%u)
@@ -56,15 +56,13 @@ jobs:
source ./setup.sh
# Important!
export SPACK_PYTHON=/usr/bin/python3
- export ENVNAME=ue-apple-clang-14.0.3
- export ENVDIR=$PWD/envs/${ENVNAME}
-
spack clean -ab
spack bootstrap now
-
+ #
+ export ENVNAME=ue-apple-clang-14.0.3-buildcache
+ export ENVDIR=$PWD/envs/${ENVNAME}
spack stack create env --site macos.default --template unified-dev --name ${ENVNAME} --compiler apple-clang
spack env activate ${ENVDIR}
- spack add ${{ inputs.specs || '' }}
export SPACK_SYSTEM_CONFIG_PATH="${ENVDIR}/site"
# Find external packages
@@ -86,8 +84,11 @@ jobs:
export -n SPACK_SYSTEM_CONFIG_PATH
+ # For buildcaches
+ spack config add config:install_tree:padded_length:200
+
# Set compiler and MPI specs
- spack config add "packages:all:providers:mpi:[openmpi@5.0.3]"
+ spack config add "packages:all:providers:mpi:[openmpi@5.0.5]"
spack config add "packages:all:compiler:[apple-clang@14.0.3]"
# Add additional variants for MET packages, different from config/common/packages.yaml
@@ -97,8 +98,8 @@ jobs:
# *DH
# Concretize and check for duplicates
- spack concretize 2>&1 | tee log.concretize.apple-clang-14.0.3
- ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.apple-clang-14.0.3 -i fms -i crtm -i esmf -i mapl
+ spack concretize 2>&1 | tee log.concretize.apple-clang-14.0.3-buildcache
+ ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.apple-clang-14.0.3-buildcache -i fms -i crtm -i esmf -i mapl
# Add and update source cache
spack mirror add local-source file:///Users/ec2-user/spack-stack/source-cache/
@@ -121,29 +122,66 @@ jobs:
# base-env
echo "base-env ..."
spack install --fail-fast --source --no-check-signature base-env 2>&1 | tee log.install.apple-clang-14.0.3.base-env
- spack buildcache create -a -u /Users/ec2-user/spack-stack/build-cache/ base-env
+ spack buildcache create -u /Users/ec2-user/spack-stack/build-cache/ base-env
# jedi-base-env
echo "jedi-base-env ..."
spack install --fail-fast --source --no-check-signature jedi-base-env 2>&1 | tee log.install.apple-clang-14.0.3.jedi-base-env
- spack buildcache create -a -u /Users/ec2-user/spack-stack/build-cache/ jedi-base-env
+ spack buildcache create -u /Users/ec2-user/spack-stack/build-cache/ jedi-base-env
+
+ # jedi-ufs-env
+ echo "jedi-ufs-env ..."
+ spack install --fail-fast --source --no-check-signature jedi-ufs-env 2>&1 | tee log.install.apple-clang-14.0.3.jedi-ufs-env
+ spack buildcache create -u /Users/ec2-user/spack-stack/build-cache/ jedi-ufs-env
# the rest
echo "unified-env ..."
spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.apple-clang-14.0.3.unified-env
- spack buildcache create -a -u /Users/ec2-user/spack-stack/build-cache/
+ spack buildcache create -u /Users/ec2-user/spack-stack/build-cache/
# Remove binary cache for next round of concretization
if [ "$USE_BINARY_CACHE" = true ] ; then
spack mirror rm local-binary
fi
+ # Remove buildcache config settings
+ spack config remove config:install_tree:padded_length
+
+ # Next steps: synchronize source and build cache to a central/combined mirror?
+
+ # Cleanup
+ spack clean -a
+ spack env deactivate
+
+ - name: create-env
+ run: |
+ # Set up spack-stack
+ source ./setup.sh
+ # Important!
+ export SPACK_PYTHON=/usr/bin/python3
+ export BUILDCACHE_ENVNAME=ue-apple-clang-14.0.3-buildcache
+ export BUILDCACHE_ENVDIR=$PWD/envs/${BUILDCACHE_ENVNAME}
+ export ENVNAME=ue-apple-clang-14.0.3
+ export ENVDIR=$PWD/envs/${ENVNAME}
+ rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/
+ spack env activate ${ENVDIR}
+
+ # Concretize and check for duplicates
+ spack concretize --force 2>&1 | tee log.concretize.apple-clang-14.0.3
+ ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.apple-clang-14.0.3 -i fms -i crtm -i esmf -i mapl
+
+ # Add binary cache back in
+ spack mirror add local-binary file:///Users/ec2-user/spack-stack/build-cache/
+ echo "Packages in combined spack build caches:"
+ spack buildcache list
+
+ # Install from cache
+ spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.apple-clang-14.0.3.unified-env
+
# Create modules
spack clean -a
spack module lmod refresh -y
spack stack setup-meta-modules
-
- # Next steps: synchronize source and build cache to a central/combined mirror?
spack env deactivate
# Test environment chaining
@@ -160,7 +198,6 @@ jobs:
- name: test-env
run: |
- # Set up homebrew and lmod support
eval "$(/opt/homebrew/bin/brew shellenv)"
source /opt/homebrew/opt/lmod/init/profile
@@ -170,7 +207,7 @@ jobs:
module use ${ENVDIR}/install/modulefiles/Core
module load stack-apple-clang/14.0.3
- module load stack-openmpi/5.0.3
+ module load stack-openmpi/5.0.5
module load stack-python/3.11.7
module available
diff --git a/.github/workflows/ubuntu-ci-x86_64-gnu.yaml b/.github/workflows/ubuntu-ci-x86_64-gnu.yaml
index 3b12d1c11..1a73ad69b 100644
--- a/.github/workflows/ubuntu-ci-x86_64-gnu.yaml
+++ b/.github/workflows/ubuntu-ci-x86_64-gnu.yaml
@@ -115,17 +115,22 @@ jobs:
# base-env
echo "base-env ..."
spack install --fail-fast --source --no-check-signature base-env 2>&1 | tee log.install.gnu-11.4.0-buildcache.base-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/ base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ base-env
# jedi-base-env
echo "jedi-base-env ..."
spack install --fail-fast --source --no-check-signature jedi-base-env 2>&1 | tee log.install.gnu-11.4.0-buildcache.jedi-base-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/ jedi-base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-base-env
+
+ # jedi-ufs-env
+ echo "jedi-ufs-env ..."
+ spack install --fail-fast --source --no-check-signature jedi-ufs-env 2>&1 | tee log.install.gnu-11.4.0-buildcache.jedi-ufs-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-ufs-env
# the rest
echo "${TEMPLATE} ..."
spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.gnu-11.4.0-buildcache.${TEMPLATE}
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/
# Remove binary cache for next round of concretization
if [ "$USE_BINARY_CACHE" = true ] ; then
@@ -145,6 +150,7 @@ jobs:
- name: create-env
run: |
+ # Set up spack-stack
source ./setup.sh
export BUILDCACHE_ENVNAME=ue-gcc-11.4.0-buildcache
export BUILDCACHE_ENVDIR=$PWD/envs/${BUILDCACHE_ENVNAME}
@@ -153,20 +159,22 @@ jobs:
rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/
spack env activate ${ENVDIR}
- # Concretize
+ # Concretize and check for duplicates
spack concretize --force 2>&1 | tee log.concretize.gnu-11.4.0
${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.gnu-11.4.0 -i fms -i crtm -i esmf -i mapl
- # Add binary cache back in and reindex it
+ # Add binary cache back in
spack mirror add local-binary file:///home/ubuntu/spack-stack/build-cache/
echo "Packages in combined spack build caches:"
spack buildcache list
- # base-env
- echo "base-env ..."
+ # Install from cache
spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.gnu-11.4.0.unified-env
+ # Check shared libraries
${SPACK_STACK_DIR}/util/ldd_check.py $SPACK_ENV 2>&1 | tee log.ldd_check
+
+ # Create modules
spack clean -a
spack module tcl refresh -y
spack stack setup-meta-modules
@@ -201,7 +209,7 @@ jobs:
module use ${ENVDIR}/install/modulefiles/Core
module load stack-gcc/11.4.0
- module load stack-openmpi/5.0.3
+ module load stack-openmpi/5.0.5
module load stack-python/3.11.7
module available
diff --git a/.github/workflows/ubuntu-ci-x86_64-intel.yaml b/.github/workflows/ubuntu-ci-x86_64-intel.yaml
index eed14ce5b..f391ce901 100644
--- a/.github/workflows/ubuntu-ci-x86_64-intel.yaml
+++ b/.github/workflows/ubuntu-ci-x86_64-intel.yaml
@@ -93,7 +93,7 @@ jobs:
echo " intel-oneapi-mpi:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
echo " buildable: false" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
echo " externals:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
- echo " - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0 +classic-names" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
echo " prefix: /opt/intel/oneapi" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
# Add external Intel MKL and oneAPI runtime
@@ -155,17 +155,22 @@ jobs:
# base-env
echo "base-env ..."
spack install --fail-fast --source --no-check-signature base-env 2>&1 | tee log.install.intel-2021.10.0-buildcache.base-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/ base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ base-env
# jedi-base-env
echo "jedi-base-env ..."
spack install --fail-fast --source --no-check-signature jedi-base-env 2>&1 | tee log.install.intel-2021.10.0-buildcache.jedi-base-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/ jedi-base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-base-env
+
+ # jedi-ufs-env
+ echo "jedi-ufs-env ..."
+ spack install --fail-fast --source --no-check-signature jedi-ufs-env 2>&1 | tee log.install.intel-2021.10.0-buildcache.jedi-ufs-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-ufs-env
# the rest
echo "unified-env ..."
spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.intel-2021.10.0-buildcache.unified-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/
# Remove binary cache for next round of concretization
if [ "$USE_BINARY_CACHE" = true ] ; then
@@ -183,6 +188,7 @@ jobs:
- name: create-env
run: |
+ # Set up spack-stack
source ./setup.sh
export BUILDCACHE_ENVNAME=ue-intel-2021.10.0-buildcache
export BUILDCACHE_ENVDIR=$PWD/envs/${BUILDCACHE_ENVNAME}
@@ -191,20 +197,22 @@ jobs:
rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/
spack env activate ${ENVDIR}
- # Concretize
+ # Concretize and check for duplicates
spack concretize --force 2>&1 | tee log.concretize.intel-2021.10.0
${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.intel-2021.10.0 -i fms -i crtm -i esmf -i mapl
- # Add binary cache back in and reindex it
+ # Add binary cache back in
spack mirror add local-binary file:///home/ubuntu/spack-stack/build-cache/
echo "Packages in combined spack build caches:"
spack buildcache list
- # base-env
- echo "base-env ..."
+ # Install from cache
spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.intel-2021.10.0.unified-env
+ # Check shared libraries
${SPACK_STACK_DIR}/util/ldd_check.py $SPACK_ENV 2>&1 | tee log.ldd_check
+
+ # Create modules
spack clean -a
spack module tcl refresh -y
spack stack setup-meta-modules
diff --git a/.github/workflows/ubuntu-ci-x86_64-oneapi-ifx.yaml b/.github/workflows/ubuntu-ci-x86_64-oneapi-ifx.yaml
new file mode 100644
index 000000000..0397b0e51
--- /dev/null
+++ b/.github/workflows/ubuntu-ci-x86_64-oneapi-ifx.yaml
@@ -0,0 +1,245 @@
+name: ubuntu-ci-c6a-x86_64-oneapi-ifx-build
+on:
+ pull_request:
+ workflow_dispatch:
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+ cancel-in-progress: true
+
+defaults:
+ run:
+ shell: bash
+
+jobs:
+ ubuntu-ci-c6a-x86_64-oneapi-ifx-build:
+ runs-on: [ubuntu-ci-c6a-x86_64]
+
+ steps:
+ - name: cleanup
+ run: |
+ pwd
+ ls -lart
+ set +e
+ find ./* -type d -exec chmod u+xw {} \;
+ set -e
+ rm -fr *
+
+ - name: checkout
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+
+ - name: prepare-directories
+ run: |
+ mkdir -p /home/ubuntu/spack-stack/build-cache/
+ mkdir -p /home/ubuntu/spack-stack/source-cache/
+
+ - name: create-buildcache
+ run: |
+ # Get day of week to decide whether to use build caches or not
+ DOW=$(date +%u)
+ # Monday is 1 ... Sunday is 7
+ if [[ $DOW == 7 ]]; then
+ export USE_BINARY_CACHE=false
+ echo "Ignore existing binary cache for creating buildcache environment"
+ else
+ export USE_BINARY_CACHE=true
+ echo "Use existing binary cache for creating buildcache environment"
+ fi
+
+ # Set up spack-stack
+ source ./setup.sh
+ export ENVNAME=ue-oneapi-ifx-2024.2.0-buildcache
+ export ENVDIR=$PWD/envs/${ENVNAME}
+ spack stack create env --site linux.default --template unified-dev --name ${ENVNAME} --compiler oneapi
+ spack env activate ${ENVDIR}
+ export SPACK_SYSTEM_CONFIG_PATH="${ENVDIR}/site"
+
+ # Find external packages
+ spack external find --scope system \
+ --exclude bison --exclude openssl \
+ --exclude curl --exclude python \
+ --exclude gmake
+ spack external find --scope system sed
+ spack external find --scope system perl
+ spack external find --scope system wget
+ spack external find --scope system texlive
+ spack external find --scope system mysql
+
+ # Find compilers
+ spack compiler find --scope system
+
+ # For Intel oneAPI
+ echo "" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo "- compiler:" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " spec: oneapi@2024.2.0" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " paths:" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " cc: /opt/intel/oneapi/compiler/2024.2/bin/icx" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " cxx: /opt/intel/oneapi/compiler/2024.2/bin/icpx" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " f77: /opt/intel/oneapi/compiler/2024.2/bin/ifx" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " fc: /opt/intel/oneapi/compiler/2024.2/bin/ifx" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " flags: {}" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " operating_system: ubuntu22.04" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " target: x86_64" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " modules: []" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " environment: {}" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+ echo " extra_rpaths: []" >> ${SPACK_SYSTEM_CONFIG_PATH}/compilers.yaml
+
+ # Need to find external Intel MPI and annotate with the
+ # correct compiler, no way to do that with spack commands.
+ echo "" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " intel-oneapi-mpi:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " buildable: false" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " externals:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " - spec: intel-oneapi-mpi@2021.13%oneapi@2024.2.0" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " prefix: /opt/intel/oneapi" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+
+ # Add external Intel MKL and oneAPI runtime
+ echo "" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " intel-oneapi-mkl:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " externals:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " - spec: intel-oneapi-mkl@2024.2%oneapi@2024.2.0" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " prefix: /opt/intel/oneapi" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " intel-oneapi-runtime:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " externals:" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " - spec: intel-oneapi-runtime@2024.2.0%oneapi@2024.2.0" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+ echo " prefix: /opt/intel/oneapi" >> ${SPACK_SYSTEM_CONFIG_PATH}/packages.yaml
+
+ export -n SPACK_SYSTEM_CONFIG_PATH
+
+ # For buildcaches
+ spack config add config:install_tree:padded_length:200
+
+ # Set compiler and MPI specs
+ spack config add "packages:all:providers:mpi:[intel-oneapi-mpi@2021.13]"
+ spack config add "packages:all:compiler:[oneapi@2024.2.0,gcc@11.4.0]"
+
+ # Add additional variants for MET packages, different from config/common/packages.yaml
+ spack config add "packages:met:variants:+python +grib2 +graphics +lidar2nc +modis"
+
+ # Switch providers for blas, lapack, fftw-api
+ spack config add "packages:all:providers:mpi:[intel-oneapi-mpi]"
+ spack config add "packages:all:providers:blas:[intel-oneapi-mkl]"
+ spack config add "packages:all:providers:fftw-api:[intel-oneapi-mkl]"
+ spack config add "packages:all:providers:lapack:[intel-oneapi-mkl]"
+ spack config add "packages:ectrans:require:'+mkl ~fftw'"
+ spack config add "packages:gsibec:require:'+mkl'"
+ spack config add "packages:py-numpy:require:['^intel-oneapi-mkl']"
+
+ # Pin gmake to avoid duplicate packages (excluded gmake from spack external find above)
+ spack config add "packages:gmake:require:'@:4.2'"
+
+ # Remove wgrib2 from all virtual packages, since it doesn't build with oneAPI
+ sed -i 's/depends_on("wgrib2")/#depends_on("wgrib2")/g' `grep -lRie wgrib2 spack-ext/`
+
+ # Don't generate ecflow module when using external package
+ spack config add "modules:default:tcl:exclude:[ecflow]"
+
+ # Concretize and check for duplicates
+ spack concretize 2>&1 | tee log.concretize.oneapi-ifx-2024.2.0-buildcache
+ ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.oneapi-ifx-2024.2.0-buildcache -i fms -i crtm -i esmf -i mapl
+
+ # Add and update source cache
+ spack mirror add local-source file:///home/ubuntu/spack-stack/source-cache/
+ spack mirror create -a -d /home/ubuntu/spack-stack/source-cache/
+
+ # Add binary cache if requested
+ if [ "$USE_BINARY_CACHE" = true ] ; then
+ set +e
+ spack mirror add local-binary file:///home/ubuntu/spack-stack/build-cache/
+ spack buildcache update-index local-binary || (echo "No valid binary cache found, proceed without" && spack mirror rm local-binary)
+          set -e
+ echo "Packages in spack binary cache:"
+ spack buildcache list
+ fi
+
+ # Break installation up in pieces and create build caches in between
+ # This allows us to "spin up" builds that altogether take longer than
+ # six hours, and/or fail later in the build process.
+
+ # base-env
+ echo "base-env ..."
+ spack install --fail-fast --source --no-check-signature base-env 2>&1 | tee log.install.oneapi-ifx-2024.2.0-buildcache.base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ base-env
+
+ # jedi-base-env
+ echo "jedi-base-env ..."
+ spack install --fail-fast --source --no-check-signature jedi-base-env 2>&1 | tee log.install.oneapi-ifx-2024.2.0-buildcache.jedi-base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-base-env
+
+ # jedi-ufs-env
+ echo "jedi-ufs-env ..."
+ spack install --fail-fast --source --no-check-signature jedi-ufs-env 2>&1 | tee log.install.oneapi-ifx-2024.2.0-buildcache.jedi-ufs-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-ufs-env
+
+ # the rest
+ echo "unified-env ..."
+ spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.oneapi-ifx-2024.2.0-buildcache.unified-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/
+
+ # Remove binary cache for next round of concretization
+ if [ "$USE_BINARY_CACHE" = true ] ; then
+ spack mirror rm local-binary
+ fi
+
+ # Remove buildcache config settings
+ spack config remove config:install_tree:padded_length
+
+ # Next steps: synchronize source and build cache to a central/combined mirror?
+
+ # Cleanup
+ spack clean -a
+ spack env deactivate
+
+ - name: create-env
+ run: |
+ # Set up spack-stack
+ source ./setup.sh
+ export BUILDCACHE_ENVNAME=ue-oneapi-ifx-2024.2.0-buildcache
+ export BUILDCACHE_ENVDIR=$PWD/envs/${BUILDCACHE_ENVNAME}
+ export ENVNAME=ue-oneapi-ifx-2024.2.0
+ export ENVDIR=$PWD/envs/${ENVNAME}
+ rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/
+ spack env activate ${ENVDIR}
+
+ # Concretize and check for duplicates
+ spack concretize --force 2>&1 | tee log.concretize.oneapi-ifx-2024.2.0
+ ${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.oneapi-ifx-2024.2.0 -i fms -i crtm -i esmf -i mapl
+
+ # Add binary cache back in
+ spack mirror add local-binary file:///home/ubuntu/spack-stack/build-cache/
+ echo "Packages in combined spack build caches:"
+ spack buildcache list
+
+ # Install from cache
+ spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.oneapi-ifx-2024.2.0.unified-env
+
+ # Check shared libraries
+ ${SPACK_STACK_DIR}/util/ldd_check.py $SPACK_ENV 2>&1 | tee log.ldd_check
+
+ # Create modules
+ spack clean -a
+ spack module tcl refresh -y
+ spack stack setup-meta-modules
+ spack env deactivate
+
+ - name: test-env
+ run: |
+ source /etc/profile.d/modules.sh
+ module use /home/ubuntu/spack-stack/modulefiles
+
+ export ENVNAME=ue-oneapi-ifx-2024.2.0
+ export ENVDIR=$PWD/envs/${ENVNAME}
+ ls -l ${ENVDIR}/install/modulefiles/Core
+
+ module use ${ENVDIR}/install/modulefiles/Core
+ module load stack-oneapi/2024.2.0
+ module load stack-intel-oneapi-mpi/2021.13
+ module load stack-python/3.11.7
+ module available
+
+ module load jedi-ufs-env
+ module load ewok-env
+ module load soca-env
+ module list
diff --git a/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml b/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml
index 0b4f7d337..976813438 100644
--- a/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml
+++ b/.github/workflows/ubuntu-ci-x86_64-oneapi.yaml
@@ -161,17 +161,22 @@ jobs:
# base-env
echo "base-env ..."
spack install --fail-fast --source --no-check-signature base-env 2>&1 | tee log.install.oneapi-2024.2.0-buildcache.base-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/ base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ base-env
# jedi-base-env
echo "jedi-base-env ..."
spack install --fail-fast --source --no-check-signature jedi-base-env 2>&1 | tee log.install.oneapi-2024.2.0-buildcache.jedi-base-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/ jedi-base-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-base-env
+
+ # jedi-ufs-env
+ echo "jedi-ufs-env ..."
+ spack install --fail-fast --source --no-check-signature jedi-ufs-env 2>&1 | tee log.install.oneapi-2024.2.0-buildcache.jedi-ufs-env
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/ jedi-ufs-env
# the rest
echo "unified-env ..."
spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.oneapi-2024.2.0-buildcache.unified-env
- spack buildcache create -a -u /home/ubuntu/spack-stack/build-cache/
+ spack buildcache create -u /home/ubuntu/spack-stack/build-cache/
# Remove binary cache for next round of concretization
if [ "$USE_BINARY_CACHE" = true ] ; then
@@ -189,6 +194,7 @@ jobs:
- name: create-env
run: |
+ # Set up spack-stack
source ./setup.sh
export BUILDCACHE_ENVNAME=ue-oneapi-2024.2.0-buildcache
export BUILDCACHE_ENVDIR=$PWD/envs/${BUILDCACHE_ENVNAME}
@@ -197,39 +203,27 @@ jobs:
rsync -av --exclude='install' --exclude='spack.lock' --exclude='.spack_db' ${BUILDCACHE_ENVDIR}/ ${ENVDIR}/
spack env activate ${ENVDIR}
- # Concretize
+ # Concretize and check for duplicates
spack concretize --force 2>&1 | tee log.concretize.oneapi-2024.2.0
${SPACK_STACK_DIR}/util/show_duplicate_packages.py -d log.concretize.oneapi-2024.2.0 -i fms -i crtm -i esmf -i mapl
- # Add binary cache back in and reindex it
+ # Add binary cache back in
spack mirror add local-binary file:///home/ubuntu/spack-stack/build-cache/
echo "Packages in combined spack build caches:"
spack buildcache list
- # base-env
- echo "base-env ..."
+ # Install from cache
spack install --fail-fast --source --no-check-signature 2>&1 | tee log.install.oneapi-2024.2.0.unified-env
+ # Check shared libraries
${SPACK_STACK_DIR}/util/ldd_check.py $SPACK_ENV 2>&1 | tee log.ldd_check
+
+ # Create modules
spack clean -a
spack module tcl refresh -y
spack stack setup-meta-modules
spack env deactivate
- ### # Test environment chaining
- ### echo "Test environment chaining"
- ### spack stack create env --name chaintest --template empty --site linux.default --upstream ${ENVDIR}/install --compiler intel
- ### # Retain config from upstream so we don't have to rebuild:
- ### cp -r ${ENVDIR}/{site,common} $PWD/envs/chaintest/.
- ### spack env activate ${PWD}/envs/chaintest
- ### # Pin gmake to avoid duplicate packages (excluded gmake from spack external find above)
- ### spack config add "packages:gmake:require:'@:4.2'"
- ### spack add nccmp@1.9.0.1%oneapi
- ### spack concretize | tee envs/chaintest/log.concretize
- ### unwanted_duplicates=$(( cat envs/chaintest/log.concretize | grep -E '^ - ' | grep -Fv 'nccmp@1.9.0.1' || true ) | wc -l)
- ### if [ ${unwanted_duplicates} -gt 0 ]; then echo "Environment chaining test failed"; exit 1; fi
- ### spack env deactivate
-
- name: test-env
run: |
source /etc/profile.d/modules.sh
diff --git a/configs/common/modules_lmod.yaml b/configs/common/modules_lmod.yaml
index 41bfa24df..8fe637fb6 100644
--- a/configs/common/modules_lmod.yaml
+++ b/configs/common/modules_lmod.yaml
@@ -112,20 +112,10 @@ modules:
environment:
set:
'ESMFMKFILE': '{prefix}/lib/esmf.mk'
- hdf5:
- environment:
- set:
- 'HDF5_DIR': '{prefix}'
libpng:
environment:
set:
'PNG_ROOT': '{prefix}'
- libyaml:
- environment:
- set:
- 'YAML_DIR': '{prefix}'
- 'YAML_LIB': '{prefix}/lib'
- 'YAML_INC': '{prefix}/include'
madis:
environment:
set:
@@ -146,17 +136,13 @@ modules:
^esmf@8.7.0b04+debug snapshot=b04: 'esmf-8.7.0b04-debug'
^esmf@8.7.0b11~debug snapshot=b11: 'esmf-8.7.0b11'
^esmf@8.7.0b11+debug snapshot=b11: 'esmf-8.7.0b11-debug'
+ ^esmf@8.7.0~debug snapshot=none: 'esmf-8.7.0'
+ ^esmf@8.7.0+debug snapshot=none: 'esmf-8.7.0-debug'
openmpi:
environment:
set:
'OMPI_MCA_rmaps_base_oversubscribe': '1'
'PRTE_MCA_rmaps_default_mapping_policy': ':oversubscribe'
- p4est:
- environment:
- set:
- 'P4EST_API_DIR': '{prefix}'
- 'P4EST_API_LIB': '{prefix}/lib'
- 'P4EST_API_INC': '{prefix}/include'
bacio:
environment:
set:
diff --git a/configs/common/modules_tcl.yaml b/configs/common/modules_tcl.yaml
index 5ce83e8b4..3275aee44 100644
--- a/configs/common/modules_tcl.yaml
+++ b/configs/common/modules_tcl.yaml
@@ -114,20 +114,10 @@ modules:
environment:
set:
'ESMFMKFILE': '{prefix}/lib/esmf.mk'
- hdf5:
- environment:
- set:
- 'HDF5_DIR': '{prefix}'
libpng:
environment:
set:
'PNG_ROOT': '{prefix}'
- libyaml:
- environment:
- set:
- 'YAML_DIR': '{prefix}'
- 'YAML_LIB': '{prefix}/lib'
- 'YAML_INC': '{prefix}/include'
madis:
environment:
set:
@@ -148,17 +138,13 @@ modules:
^esmf@8.7.0b04+debug snapshot=b04: 'esmf-8.7.0b04-debug'
^esmf@8.7.0b11~debug snapshot=b11: 'esmf-8.7.0b11'
^esmf@8.7.0b11+debug snapshot=b11: 'esmf-8.7.0b11-debug'
+ ^esmf@8.7.0~debug snapshot=none: 'esmf-8.7.0'
+ ^esmf@8.7.0+debug snapshot=none: 'esmf-8.7.0-debug'
openmpi:
environment:
set:
'OMPI_MCA_rmaps_base_oversubscribe': '1'
'PRTE_MCA_rmaps_default_mapping_policy': ':oversubscribe'
- p4est:
- environment:
- set:
- 'P4EST_API_DIR': '{prefix}'
- 'P4EST_API_LIB': '{prefix}/lib'
- 'P4EST_API_INC': '{prefix}/include'
bacio:
environment:
set:
diff --git a/configs/common/packages.yaml b/configs/common/packages.yaml
index fc7089741..1c40bd5e8 100644
--- a/configs/common/packages.yaml
+++ b/configs/common/packages.yaml
@@ -54,17 +54,19 @@ packages:
# Also, check the acorn and derecho site configs which have esmf modifications.
esmf:
require:
- - '~xerces ~pnetcdf +shared +external-parallelio +python'
- - any_of: ['@=8.6.1 snapshot=none', '@=8.7.0b11 snapshot=b11']
- - any_of: ['fflags="-fp-model precise" cxxflags="-fp-model precise"']
- when: "%intel"
- message: "Extra ESMF compile options for Intel"
- #- any_of: ['']
- # when: "%gcc"
- # message: "Extra ESMF compile options for GCC"
- #- any_of: ['']
- # when: "%apple-clang"
- # message: "Extra ESMF compile options for GCC"
+ - '~xerces ~pnetcdf +shared +external-parallelio'
+ - any_of: ['@=8.6.1 snapshot=none', '@=8.7.0 snapshot=none']
+ - any_of: ['fflags="-fp-model precise" cxxflags="-fp-model precise"']
+ when: "%intel"
+ message: "Extra ESMF compile options for Intel"
+ #- any_of: ['']
+ # when: "%gcc"
+ # message: "Extra ESMF compile options for GCC"
+ #- any_of: ['']
+ # when: "%apple-clang"
+      #  message: "Extra ESMF compile options for Apple Clang"
+ prefer:
+ - '+python'
fckit:
require: '@0.11.0 +eckit'
fftw:
@@ -100,6 +102,10 @@ packages:
# Note: Uncommenting this entry will break
# the container builds.
#require: '@2.11.0'
+ # To avoid duplicate packages being built
+ gmake:
+ require:
+ - one_of: ['@=4.2.1', '@=4.4.1']
grib-util:
require: '@1.4.0'
gsibec:
@@ -136,6 +142,10 @@ packages:
mapl:
require: '@2.46.3 ~shared ~f2py'
variants: '+pflogger'
+ # To avoid duplicate packages being built
+ meson:
+ require:
+ - '@1.5.1:'
# If making changes here, also check the Discover site configs and the CI workflows
met:
require: '@11.1.1 +python +grib2'
@@ -168,7 +178,7 @@ packages:
odc:
require: '@1.5.2 ~fortran'
openblas:
- require: '@0.3.24 +noavx512'
+ require: '@0.3.24'
openmpi:
require: '~internal-hwloc +two_level_namespace'
openssl:
@@ -256,13 +266,13 @@ packages:
qt:
require: '@5'
scotch:
- require: '@7.0.4 +mpi+metis~shared~threads~mpi_thread+noarch'
+ require: '@7.0.4 +mpi+metis~shared~threads~mpi_thread+noarch+esmumps'
sfcio:
- require: '@1.4.1'
+ require: '@1.4.2'
shumlib:
require: '@macos_clang_linux_intel_port'
sigio:
- require: '@2.3.2'
+ require: '@2.3.3'
sp:
require: '@2.5.0 precision=4,d,8'
udunits:
diff --git a/configs/containers/docker-ubuntu-clang-mpich.yaml b/configs/containers/docker-ubuntu-clang-mpich.yaml
index 6ac7f83e8..67a986d3d 100644
--- a/configs/containers/docker-ubuntu-clang-mpich.yaml
+++ b/configs/containers/docker-ubuntu-clang-mpich.yaml
@@ -59,11 +59,6 @@ spack:
- spec: mpich@4.2.1
prefix: /opt/mpich-4.2.1
version: [4.2.1]
- gmake:
- buildable: false
- externals:
- - spec: gmake@4.3
- prefix: /usr
diffutils:
buildable: false
externals:
diff --git a/configs/containers/docker-ubuntu-gcc-openmpi.yaml b/configs/containers/docker-ubuntu-gcc-openmpi.yaml
index f0e495073..43b5b2fb8 100644
--- a/configs/containers/docker-ubuntu-gcc-openmpi.yaml
+++ b/configs/containers/docker-ubuntu-gcc-openmpi.yaml
@@ -28,7 +28,7 @@ spack:
require: '%gcc'
target: [x86_64]
providers:
- mpi: [openmpi@5.0.3]
+ mpi: [openmpi@5.0.5]
compiler: [gcc@12.3.0]
gcc:
buildable: false
@@ -40,11 +40,6 @@ spack:
externals:
- spec: gcc-runtime@12.3.0
prefix: /usr
- gmake:
- buildable: false
- externals:
- - spec: gmake@4.3
- prefix: /usr
diffutils:
buildable: false
externals:
diff --git a/configs/containers/docker-ubuntu-intel-impi.yaml b/configs/containers/docker-ubuntu-intel-impi.yaml
index 5aca2e63d..6dd5ad15d 100644
--- a/configs/containers/docker-ubuntu-intel-impi.yaml
+++ b/configs/containers/docker-ubuntu-intel-impi.yaml
@@ -50,11 +50,6 @@ spack:
# externals:
# - spec: intel-oneapi-mkl@2022.1.0
# prefix: /opt/intel/oneapi
- gmake:
- buildable: false
- externals:
- - spec: gmake@4.3
- prefix: /usr
diffutils:
buildable: false
externals:
diff --git a/configs/sites/tier1/atlantis/mirrors.yaml b/configs/sites/tier1/atlantis/mirrors.yaml
new file mode 100644
index 000000000..4d363e009
--- /dev/null
+++ b/configs/sites/tier1/atlantis/mirrors.yaml
@@ -0,0 +1,18 @@
+mirrors:
+ local-source:
+ fetch:
+ url: file:///neptune_diagnostics/spack-stack/source-cache/
+ access_pair:
+ - null
+ - null
+ access_token: null
+ profile: null
+ endpoint_url: null
+ push:
+ url: file:///neptune_diagnostics/spack-stack/source-cache/
+ access_pair:
+ - null
+ - null
+ access_token: null
+ profile: null
+ endpoint_url: null
diff --git a/configs/sites/tier1/atlantis/packages_gcc.yaml b/configs/sites/tier1/atlantis/packages_gcc.yaml
new file mode 100644
index 000000000..bc10f8b75
--- /dev/null
+++ b/configs/sites/tier1/atlantis/packages_gcc.yaml
@@ -0,0 +1,13 @@
+packages:
+ all:
+ compiler:: [gcc@11.2.0]
+ providers:
+ mpi:: [openmpi@4.1.5]
+ mpi:
+ buildable: False
+ openmpi:
+ buildable: False
+ externals:
+ - spec: openmpi@4.1.5%gcc@=11.2.0~cuda~cxx~cxx_exceptions~java~memchecker+pmi~static~wrapper-rpath fabrics=ucx schedulers=slurm
+ modules:
+ - openmpi/mlnx/gcc/64/4.1.5a1
diff --git a/configs/sites/tier1/atlantis/packages_intel.yaml b/configs/sites/tier1/atlantis/packages_intel.yaml
index ea2f4de32..e6eb565f3 100644
--- a/configs/sites/tier1/atlantis/packages_intel.yaml
+++ b/configs/sites/tier1/atlantis/packages_intel.yaml
@@ -7,7 +7,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.6.0%intel@2021.6.0
+ - spec: intel-oneapi-mpi@2021.6.0%intel@2021.6.0 +classic-names
prefix: /cm/shared/apps/intel/oneapi
modules:
- mpi/2021.6.0
@@ -15,6 +15,6 @@ packages:
intel-oneapi-mkl:
externals:
- spec: intel-oneapi-mkl@2022.1.0
+ prefix: /cm/shared/apps/intel/oneapi
modules:
- mkl/2022.1.0
- prefix: /cm/shared/apps/intel/oneapi
diff --git a/configs/sites/tier1/aws-pcluster/packages_intel.yaml b/configs/sites/tier1/aws-pcluster/packages_intel.yaml
index 7fb3d8477..e0f255a05 100644
--- a/configs/sites/tier1/aws-pcluster/packages_intel.yaml
+++ b/configs/sites/tier1/aws-pcluster/packages_intel.yaml
@@ -11,7 +11,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0
+ - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0 +classic-names
prefix: /opt/intel/oneapi
modules:
- libfabric-aws/1.19.0amzn4.0
diff --git a/configs/sites/tier1/blueback/compilers.yaml b/configs/sites/tier1/blueback/compilers.yaml
new file mode 100644
index 000000000..28f061c75
--- /dev/null
+++ b/configs/sites/tier1/blueback/compilers.yaml
@@ -0,0 +1,45 @@
+compilers::
+ - compiler:
+ spec: oneapi@2024.2.0
+ paths:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+ flags: {}
+ operating_system: sles15
+ modules:
+ - PrgEnv-intel/8.4.0
+ - intel/2024.2
+ - cray-libsci/23.05.1.4
+ - libfabric/1.12.1.2.2.1
+ environment:
+ prepend_path:
+ PATH: '/opt/cray/pe/gcc/10.3.0/snos/bin'
+ CPATH: '/opt/cray/pe/gcc/10.3.0/snos/include'
+ LD_LIBRARY_PATH: '/opt/cray/libfabric/1.12.1.2.2.1/lib64:/opt/cray/pe/libsci/23.05.1.4/INTEL/2022.2/x86_64/lib:/opt/cray/pe/gcc/10.3.0/snos/lib:/opt/cray/pe/gcc/10.3.0/snos/lib64'
+ append_path:
+ CPATH: '/opt/intel/oneapi_2024.2.0.634/compiler/2024.2/opt/compiler/include/intel64'
+ set:
+ CRAYPE_LINK_TYPE: 'dynamic'
+ extra_rpaths: []
+ - compiler:
+ spec: gcc@12.1.0
+ paths:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+ flags: {}
+ operating_system: sles15
+ modules:
+ - PrgEnv-gnu/8.4.0
+ - gcc/12.1.0
+ - cray-libsci/23.05.1.4
+ - libfabric/1.12.1.2.2.1
+ environment:
+ prepend_path:
+ LD_LIBRARY_PATH: '/opt/cray/libfabric/1.12.1.2.2.1/lib64:/opt/cray/pe/libsci/23.05.1.4/GNU/10.3/x86_64/lib'
+ set:
+ CRAYPE_LINK_TYPE: 'dynamic'
+ extra_rpaths: []
diff --git a/configs/sites/tier1/sandy/config.yaml b/configs/sites/tier1/blueback/config.yaml
similarity index 100%
rename from configs/sites/tier1/sandy/config.yaml
rename to configs/sites/tier1/blueback/config.yaml
diff --git a/configs/sites/tier1/blueback/mirrors.yaml b/configs/sites/tier1/blueback/mirrors.yaml
new file mode 100644
index 000000000..2b67bea03
--- /dev/null
+++ b/configs/sites/tier1/blueback/mirrors.yaml
@@ -0,0 +1,18 @@
+mirrors:
+ local-source:
+ fetch:
+ url: file:///p/cwfs/projects/NEPTUNE/spack-stack/source-cache
+ access_pair:
+ - null
+ - null
+ access_token: null
+ profile: null
+ endpoint_url: null
+ push:
+ url: file:///p/cwfs/projects/NEPTUNE/spack-stack/source-cache
+ access_pair:
+ - null
+ - null
+ access_token: null
+ profile: null
+ endpoint_url: null
diff --git a/configs/sites/tier1/blueback/modules.yaml b/configs/sites/tier1/blueback/modules.yaml
new file mode 100644
index 000000000..b134d3669
--- /dev/null
+++ b/configs/sites/tier1/blueback/modules.yaml
@@ -0,0 +1,7 @@
+modules:
+ default:
+ enable::
+ - tcl
+ tcl:
+ include:
+ - python
diff --git a/configs/sites/tier1/blueback/packages.yaml b/configs/sites/tier1/blueback/packages.yaml
new file mode 100644
index 000000000..bb4d39c4d
--- /dev/null
+++ b/configs/sites/tier1/blueback/packages.yaml
@@ -0,0 +1,129 @@
+packages:
+ autoconf:
+ externals:
+ - spec: autoconf@2.69
+ prefix: /usr
+ automake:
+ externals:
+ - spec: automake@1.15.1
+ prefix: /usr
+ binutils:
+ externals:
+ - spec: binutils@2.43.1
+ prefix: /usr
+ cmake:
+ externals:
+ - spec: cmake@3.20.4
+ prefix: /usr
+ coreutils:
+ externals:
+ - spec: coreutils@8.32
+ prefix: /usr
+ curl:
+ externals:
+ - spec: curl@8.0.1+gssapi+ldap+nghttp2
+ prefix: /usr
+ cvs:
+ externals:
+ - spec: cvs@1.12.12
+ prefix: /usr
+ diffutils:
+ externals:
+ - spec: diffutils@3.6
+ prefix: /usr
+ findutils:
+ externals:
+ - spec: findutils@4.8.0
+ prefix: /usr
+ flex:
+ externals:
+ - spec: flex@2.6.4+lex
+ prefix: /usr
+ gawk:
+ externals:
+ - spec: gawk@4.2.1
+ prefix: /usr
+ gettext:
+ externals:
+ - spec: gettext@0.20.2
+ prefix: /usr
+ git:
+ externals:
+ - spec: git@2.35.3+tcltk
+ prefix: /usr
+ git-lfs:
+ externals:
+ - spec: git-lfs@3.0.2
+ prefix: /usr
+ gmake:
+ externals:
+ - spec: gmake@4.2.1
+ prefix: /usr
+ groff:
+ externals:
+ - spec: groff@1.22.4
+ prefix: /usr
+ libtool:
+ externals:
+ - spec: libtool@2.4.6
+ prefix: /usr
+ m4:
+ externals:
+ - spec: m4@1.4.18
+ prefix: /usr
+ mysql:
+ buildable: False
+ externals:
+ - spec: mysql@8.0.31
+ prefix: /p/app/projects/NEPTUNE/spack-stack/mysql-8.0.31
+ modules:
+ - mysql/8.0.31
+ openssh:
+ externals:
+ - spec: openssh@8.4p1
+ prefix: /usr
+ - spec: openssh@9.8p1a
+ prefix: /usr/local/krb5
+ openssl:
+ externals:
+ - spec: openssl@1.1.1l-fips
+ prefix: /usr
+ perl:
+ externals:
+ - spec: perl@5.26.1~cpanm+opcode+open+shared+threads
+ prefix: /usr
+ pkg-config:
+ externals:
+ - spec: pkg-config@0.29.2
+ prefix: /usr
+ qt:
+ buildable: False
+ externals:
+ - spec: qt@5.15.2
+ prefix: /p/app/projects/NEPTUNE/spack-stack/qt-5.15.2/5.15.2/gcc_64
+ modules:
+ - qt/5.15.2
+ sed:
+ externals:
+ - spec: sed@4.4
+ prefix: /usr
+ subversion:
+ externals:
+ - spec: subversion@1.14.1
+ prefix: /usr
+ swig:
+ externals:
+ - spec: swig@3.0.12
+ prefix: /usr
+ tar:
+ externals:
+ - spec: tar@1.34
+ prefix: /usr
+ texinfo:
+ externals:
+ - spec: texinfo@6.5
+ prefix: /usr
+ wget:
+ externals:
+ - spec: wget@1.20.3
+ prefix: /usr
diff --git a/configs/sites/tier1/blueback/packages_gcc.yaml b/configs/sites/tier1/blueback/packages_gcc.yaml
new file mode 100644
index 000000000..13c9f8d48
--- /dev/null
+++ b/configs/sites/tier1/blueback/packages_gcc.yaml
@@ -0,0 +1,13 @@
+packages:
+ all:
+ compiler:: [gcc@12.1.0]
+ providers:
+ mpi:: [cray-mpich@8.1.21]
+ mpi:
+ buildable: False
+ cray-mpich:
+ externals:
+ - spec: cray-mpich@8.1.21%gcc@12.1.0 ~wrappers
+ modules:
+ - cray-mpich-ucx/8.1.21
+ - craype-network-ucx
diff --git a/configs/sites/tier1/blueback/packages_oneapi.yaml b/configs/sites/tier1/blueback/packages_oneapi.yaml
new file mode 100644
index 000000000..3bcf99f38
--- /dev/null
+++ b/configs/sites/tier1/blueback/packages_oneapi.yaml
@@ -0,0 +1,27 @@
+packages:
+ all:
+ compiler:: [oneapi@2024.2.0,gcc@10.3.0]
+ providers:
+ mpi:: [cray-mpich@8.1.21]
+ mpi:
+ buildable: False
+ cray-mpich:
+ externals:
+ - spec: cray-mpich@8.1.21%oneapi@2024.2.0 ~wrappers
+ modules:
+ - cray-mpich-ucx/8.1.21
+ - craype-network-ucx
+ intel-oneapi-mkl:
+ externals:
+ - spec: intel-oneapi-mkl@2024.2%oneapi@2024.2.0
+ prefix: /opt/intel/oneapi_2024.2.0.634
+ intel-oneapi-tbb:
+ externals:
+ - spec: intel-oneapi-tbb@2021.13%oneapi@2024.2.0
+ prefix: /opt/intel/oneapi_2024.2.0.634
+ modules:
+ - tbb/2021.13
+ intel-oneapi-runtime:
+ externals:
+ - spec: intel-oneapi-runtime@2024.2.0%oneapi@2024.2.0
+ prefix: /opt/intel/oneapi_2024.2.0.634
diff --git a/configs/sites/tier1/discover-scu16/packages_intel.yaml b/configs/sites/tier1/discover-scu16/packages_intel.yaml
index 8bd48da65..a0f857ac8 100644
--- a/configs/sites/tier1/discover-scu16/packages_intel.yaml
+++ b/configs/sites/tier1/discover-scu16/packages_intel.yaml
@@ -7,7 +7,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.6.0%intel@2021.6.0
+ - spec: intel-oneapi-mpi@2021.6.0%intel@2021.6.0 +classic-names
prefix: /usr/local/intel/oneapi/2021
modules:
- mpi/impi/2021.6.0
diff --git a/configs/sites/tier1/discover-scu17/packages_intel.yaml b/configs/sites/tier1/discover-scu17/packages_intel.yaml
index d971e2387..e28f13a05 100644
--- a/configs/sites/tier1/discover-scu17/packages_intel.yaml
+++ b/configs/sites/tier1/discover-scu17/packages_intel.yaml
@@ -7,7 +7,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.10.0%intel@=2021.10.0
+ - spec: intel-oneapi-mpi@2021.10.0%intel@=2021.10.0 +classic-names
prefix: /usr/local/intel/oneapi/2021
modules:
- mpi/impi/2021.10.0
diff --git a/configs/sites/tier1/hera/packages_intel.yaml b/configs/sites/tier1/hera/packages_intel.yaml
index 5ac52dd8b..34b77b76d 100644
--- a/configs/sites/tier1/hera/packages_intel.yaml
+++ b/configs/sites/tier1/hera/packages_intel.yaml
@@ -11,7 +11,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.5.1%intel@2021.5.0
+ - spec: intel-oneapi-mpi@2021.5.1%intel@2021.5.0 +classic-names
modules:
- impi/2022.1.2
prefix: /apps/oneapi
diff --git a/configs/sites/tier1/hercules/packages_intel.yaml b/configs/sites/tier1/hercules/packages_intel.yaml
index eb4a22dc7..b58287ac3 100644
--- a/configs/sites/tier1/hercules/packages_intel.yaml
+++ b/configs/sites/tier1/hercules/packages_intel.yaml
@@ -11,7 +11,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.9.0%intel@2021.9.0
+ - spec: intel-oneapi-mpi@2021.9.0%intel@2021.9.0 +classic-names
modules:
- intel-oneapi-mpi/2021.9.0
intel-oneapi-mkl:
diff --git a/configs/sites/tier1/jet/packages_intel.yaml b/configs/sites/tier1/jet/packages_intel.yaml
index 5e0b82aaf..e04c4fa67 100644
--- a/configs/sites/tier1/jet/packages_intel.yaml
+++ b/configs/sites/tier1/jet/packages_intel.yaml
@@ -7,7 +7,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.5.1%intel@2021.5.0
+ - spec: intel-oneapi-mpi@2021.5.1%intel@2021.5.0 +classic-names
modules:
- impi/2022.1.2
prefix: /apps/oneapi
diff --git a/configs/sites/tier1/narwhal/compilers.yaml b/configs/sites/tier1/narwhal/compilers.yaml
index 1b7c23dcf..ec9b170c0 100644
--- a/configs/sites/tier1/narwhal/compilers.yaml
+++ b/configs/sites/tier1/narwhal/compilers.yaml
@@ -9,7 +9,7 @@ compilers::
flags: {}
operating_system: sles15
modules:
- - PrgEnv-intel/8.3.3
+ - PrgEnv-intel/8.4.0
- intel-classic/2023.2.0
- cray-libsci/23.05.1.4
- libfabric/1.12.1.2.2.1
@@ -21,6 +21,30 @@ compilers::
set:
CRAYPE_LINK_TYPE: 'dynamic'
extra_rpaths: []
+ - compiler:
+ spec: oneapi@2024.2.0
+ paths:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+ flags: {}
+ operating_system: sles15
+ modules:
+ - PrgEnv-intel/8.4.0
+ - intel/2024.2
+ - cray-libsci/23.05.1.4
+ - libfabric/1.12.1.2.2.1
+ environment:
+ prepend_path:
+ PATH: '/opt/cray/pe/gcc/10.3.0/snos/bin'
+ CPATH: '/opt/cray/pe/gcc/10.3.0/snos/include'
+ LD_LIBRARY_PATH: '/opt/cray/libfabric/1.12.1.2.2.1/lib64:/opt/cray/pe/libsci/23.05.1.4/INTEL/2022.2/x86_64/lib:/opt/cray/pe/gcc/10.3.0/snos/lib:/opt/cray/pe/gcc/10.3.0/snos/lib64'
+ append_path:
+ CPATH: '/opt/intel/oneapi_2024.2.0.634/compiler/2024.2/opt/compiler/include/intel64'
+ set:
+ CRAYPE_LINK_TYPE: 'dynamic'
+ extra_rpaths: []
- compiler:
spec: gcc@10.3.0
paths:
@@ -31,7 +55,7 @@ compilers::
flags: {}
operating_system: sles15
modules:
- - PrgEnv-gnu/8.3.3
+ - PrgEnv-gnu/8.4.0
- gcc/10.3.0
- cray-libsci/23.05.1.4
- libfabric/1.12.1.2.2.1
diff --git a/configs/sites/tier1/narwhal/mirrors.yaml b/configs/sites/tier1/narwhal/mirrors.yaml
index 709f9096c..2b67bea03 100644
--- a/configs/sites/tier1/narwhal/mirrors.yaml
+++ b/configs/sites/tier1/narwhal/mirrors.yaml
@@ -1,7 +1,7 @@
mirrors:
local-source:
fetch:
- url: file:///p/app/projects/NEPTUNE/spack-stack/source-cache
+ url: file:///p/cwfs/projects/NEPTUNE/spack-stack/source-cache
access_pair:
- null
- null
@@ -9,7 +9,7 @@ mirrors:
profile: null
endpoint_url: null
push:
- url: file:///p/app/projects/NEPTUNE/spack-stack/source-cache
+ url: file:///p/cwfs/projects/NEPTUNE/spack-stack/source-cache
access_pair:
- null
- null
diff --git a/configs/sites/tier1/narwhal/packages_oneapi.yaml b/configs/sites/tier1/narwhal/packages_oneapi.yaml
new file mode 100644
index 000000000..3bcf99f38
--- /dev/null
+++ b/configs/sites/tier1/narwhal/packages_oneapi.yaml
@@ -0,0 +1,27 @@
+packages:
+ all:
+ compiler:: [oneapi@2024.2.0,gcc@10.3.0]
+ providers:
+ mpi:: [cray-mpich@8.1.21]
+ mpi:
+ buildable: False
+ cray-mpich:
+ externals:
+ - spec: cray-mpich@8.1.21%oneapi@2024.2.0 ~wrappers
+ modules:
+ - cray-mpich-ucx/8.1.21
+ - craype-network-ucx
+ intel-oneapi-mkl:
+ externals:
+ - spec: intel-oneapi-mkl@2024.2%oneapi@2024.2.0
+ prefix: /opt/intel/oneapi_2024.2.0.634
+ intel-oneapi-tbb:
+ externals:
+ - spec: intel-oneapi-tbb@2021.13%oneapi@2024.2.0
+ prefix: /opt/intel/oneapi_2024.2.0.634
+ modules:
+ - tbb/2021.13
+ intel-oneapi-runtime:
+ externals:
+ - spec: intel-oneapi-runtime@2024.2.0%oneapi@2024.2.0
+ prefix: /opt/intel/oneapi_2024.2.0.634
diff --git a/configs/sites/tier1/nautilus/compilers.yaml b/configs/sites/tier1/nautilus/compilers.yaml
index aff3f05dd..147614a13 100644
--- a/configs/sites/tier1/nautilus/compilers.yaml
+++ b/configs/sites/tier1/nautilus/compilers.yaml
@@ -59,6 +59,31 @@ compilers:
append_path:
CPATH: '/p/app/projects/NEPTUNE/spack-stack/oneapi-2024.2.1/compiler/2024.2/opt/compiler/include/intel64'
extra_rpaths: []
+#- compiler:
+# spec: oneapi@2025.0.0
+# paths:
+# cc: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0/compiler/2025.0/bin/icx
+# cxx: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0/compiler/2025.0/bin/icpx
+# f77: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0/compiler/2025.0/bin/ifx
+# fc: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0/compiler/2025.0/bin/ifx
+# flags: {}
+# operating_system: rhel8
+# target: x86_64
+# modules:
+# - slurm
+# - tbb/2022.0
+# - compiler-rt/2025.0.0
+# - umf/0.9.0
+# - compiler-intel-llvm/2025.0.0
+# environment:
+# prepend_path:
+# PATH: '/opt/rh/gcc-toolset-11/root/usr/bin'
+# CPATH: '/opt/rh/gcc-toolset-11/root/usr/include'
+# LD_LIBRARY_PATH: '/opt/scyld/slurm/lib64:/opt/scyld/slurm/lib64/slurm:/opt/rh/gcc-toolset-11/root/usr/lib64:/opt/rh/gcc-toolset-11/root/usr/lib'
+# MODULEPATH: '/p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0/modulefiles'
+# append_path:
+# CPATH: '/p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0/compiler/2025.0/opt/compiler/include/intel64'
+# extra_rpaths: []
- compiler:
spec: gcc@12.2.1
paths:
diff --git a/configs/sites/tier1/nautilus/mirrors.yaml b/configs/sites/tier1/nautilus/mirrors.yaml
index 709f9096c..2b67bea03 100644
--- a/configs/sites/tier1/nautilus/mirrors.yaml
+++ b/configs/sites/tier1/nautilus/mirrors.yaml
@@ -1,7 +1,7 @@
mirrors:
local-source:
fetch:
- url: file:///p/app/projects/NEPTUNE/spack-stack/source-cache
+ url: file:///p/cwfs/projects/NEPTUNE/spack-stack/source-cache
access_pair:
- null
- null
@@ -9,7 +9,7 @@ mirrors:
profile: null
endpoint_url: null
push:
- url: file:///p/app/projects/NEPTUNE/spack-stack/source-cache
+ url: file:///p/cwfs/projects/NEPTUNE/spack-stack/source-cache
access_pair:
- null
- null
diff --git a/configs/sites/tier1/nautilus/packages_oneapi.yaml b/configs/sites/tier1/nautilus/packages_oneapi.yaml
index 53254442f..263c72bf9 100644
--- a/configs/sites/tier1/nautilus/packages_oneapi.yaml
+++ b/configs/sites/tier1/nautilus/packages_oneapi.yaml
@@ -1,8 +1,10 @@
packages:
all:
compiler:: [oneapi@2024.2.1,gcc@11.2.1]
+ #compiler:: [oneapi@2025.0.0, gcc@11.2.1]
providers:
mpi:: [intel-oneapi-mpi@2021.13]
+ #mpi:: [intel-oneapi-mpi@2021.14]
mpi:
buildable: False
intel-oneapi-mpi:
@@ -11,18 +13,30 @@ packages:
prefix: /p/app/projects/NEPTUNE/spack-stack/oneapi-2024.2.1
modules:
- mpi/2021.13
+ #- spec: intel-oneapi-mpi@2021.14%oneapi@2025.0.0
+ # prefix: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0
+ # modules:
+ # - mpi/2021.14
intel-oneapi-mkl:
externals:
- spec: intel-oneapi-mkl@2024.2%oneapi@2024.2.1
prefix: /p/app/projects/NEPTUNE/spack-stack/oneapi-2024.2.1
modules:
- mkl/2024.2
+ #- spec: intel-oneapi-mkl@2025.0%oneapi@2025.0.0
+ # prefix: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0
+ # modules:
+ # - mkl/2025.0
intel-oneapi-tbb:
externals:
- spec: intel-oneapi-tbb@2021.13%oneapi@2024.2.1
prefix: /p/app/projects/NEPTUNE/spack-stack/oneapi-2024.2.1
modules:
- tbb/2021.13
+ #- spec: intel-oneapi-tbb@2022.0%oneapi@2025.0.0
+ # prefix: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0
+ # modules:
+ # - tbb/2022.0
intel-oneapi-runtime:
externals:
- spec: intel-oneapi-runtime@2024.2.1%oneapi@2024.2.1
@@ -30,3 +44,8 @@ packages:
modules:
- tbb/2021.13
- compiler-rt/2024.2.1
+ #- spec: intel-oneapi-runtime@2025.0.0%oneapi@2025.0.0
+ # prefix: /p/app/projects/NEPTUNE/spack-stack/oneapi-2025.0.0
+ # modules:
+ # - tbb/2022.0
+ # - compiler-rt/2025.0.0
diff --git a/configs/sites/tier1/navy-aws/README.md b/configs/sites/tier1/navy-aws/README.md
new file mode 100644
index 000000000..b2b0e58f0
--- /dev/null
+++ b/configs/sites/tier1/navy-aws/README.md
@@ -0,0 +1,41 @@
+# Provisioning NRL ParallelWorks AWS clusters
+
+## Steps to perform before installing spack-stack develop as of 2024/11/24
+
+Note. Some of these packages may already be installed, but for the sake of completeness, they are listed here.
+```
+sudo su -
+chmod 777 /contrib
+
+yum install -y gcc-toolset-13
+yum install -y gcc-toolset-13-runtime
+yum install -y gcc-toolset-13-binutils
+yum install -y gcc-toolset-13-gcc
+yum install -y gcc-toolset-13-gcc-c++
+yum install -y gcc-toolset-13-gcc-gfortran
+yum install -y gcc-toolset-13-gdb
+
+yum install -y binutils-devel
+yum install -y m4
+yum install -y wget
+yum install -y git
+yum install -y git-lfs
+yum install -y bash-completion
+yum install -y bzip2 bzip2-devel
+yum install -y unzip
+yum install -y patch
+yum install -y automake
+yum install -y xorg-x11-xauth
+yum install -y xterm
+yum install -y perl-IPC-Cmd
+yum install -y gettext-devel
+yum install -y texlive
+yum install -y bison
+yum install -y screen
+
+yum install -y qt5-qtbase
+yum install -y qt5-qttools-devel
+yum install -y qt5-qtsvg-devel
+```
+
+For instructions for building spack-stack, see the spack-stack documentation on readthedocs (https://spack-stack.readthedocs.io/en/latest). For instructions for using pre-built spack-stack environments, see the spack-stack wiki (https://github.com/JCSDA/spack-stack/wiki).
diff --git a/configs/sites/tier1/navy-aws/compilers.yaml b/configs/sites/tier1/navy-aws/compilers.yaml
new file mode 100644
index 000000000..4c161e8cd
--- /dev/null
+++ b/configs/sites/tier1/navy-aws/compilers.yaml
@@ -0,0 +1,28 @@
+compilers:
+- compiler:
+ spec: gcc@=13.3.1
+ paths:
+ cc: /opt/rh/gcc-toolset-13/root/usr/bin/gcc
+ cxx: /opt/rh/gcc-toolset-13/root/usr/bin/g++
+ f77: /opt/rh/gcc-toolset-13/root/usr/bin/gfortran
+ fc: /opt/rh/gcc-toolset-13/root/usr/bin/gfortran
+ flags: {}
+ operating_system: rocky8
+ target: x86_64
+ modules: []
+ environment: {}
+ extra_rpaths: []
+- compiler:
+ spec: gcc@=8.5.0
+ paths:
+ cc: /usr/bin/gcc
+ cxx: /usr/bin/g++
+ f77: /usr/bin/gfortran
+ fc: /usr/bin/gfortran
+ flags: {}
+ operating_system: rocky8
+ target: x86_64
+ modules: []
+ environment: {}
+ extra_rpaths: []
+
diff --git a/configs/sites/tier1/navy-aws/config.yaml b/configs/sites/tier1/navy-aws/config.yaml
new file mode 100644
index 000000000..d01ba35fe
--- /dev/null
+++ b/configs/sites/tier1/navy-aws/config.yaml
@@ -0,0 +1,10 @@
+config:
+ build_jobs: 6
+
+ # Overrides for spack build and staging areas to speed up builds
+ # and avoid errors with hard links on the NFS filesystem /contrib
+
+ build_stage: /tmp/spack-stack/cache/build_stage
+ test_stage: /tmp/spack-stack/cache/test_stage
+ source_cache: /tmp/spack-stack/cache/source_cache
+ misc_cache: /tmp/spack-stack/cache/misc_cache
diff --git a/configs/sites/tier1/sandy/mirrors.yaml b/configs/sites/tier1/navy-aws/mirrors.yaml
similarity index 69%
rename from configs/sites/tier1/sandy/mirrors.yaml
rename to configs/sites/tier1/navy-aws/mirrors.yaml
index c48f33bf1..3d247981f 100644
--- a/configs/sites/tier1/sandy/mirrors.yaml
+++ b/configs/sites/tier1/navy-aws/mirrors.yaml
@@ -1,7 +1,7 @@
mirrors:
local-source:
fetch:
- url: file:///gpfs/fs1/neptune/spack-stack/source-cache
+ url: file:///contrib/spack-stack/source-cache
access_pair:
- null
- null
@@ -9,7 +9,7 @@ mirrors:
profile: null
endpoint_url: null
push:
- url: file:///gpfs/fs1/neptune/spack-stack/source-cache
+ url: file:///contrib/spack-stack/source-cache
access_pair:
- null
- null
diff --git a/configs/sites/tier1/navy-aws/modules.yaml b/configs/sites/tier1/navy-aws/modules.yaml
new file mode 100644
index 000000000..b134d3669
--- /dev/null
+++ b/configs/sites/tier1/navy-aws/modules.yaml
@@ -0,0 +1,7 @@
+modules:
+ default:
+ enable::
+ - tcl
+ tcl:
+ include:
+ - python
diff --git a/configs/sites/tier1/sandy/packages.yaml b/configs/sites/tier1/navy-aws/packages.yaml
similarity index 63%
rename from configs/sites/tier1/sandy/packages.yaml
rename to configs/sites/tier1/navy-aws/packages.yaml
index 1af1ab029..7ac471cdf 100644
--- a/configs/sites/tier1/sandy/packages.yaml
+++ b/configs/sites/tier1/navy-aws/packages.yaml
@@ -1,18 +1,4 @@
packages:
- all:
- compiler:: [gcc@9.3.0]
- providers:
- mpi:: [openmpi@4.0.5]
-
-### MPI, Python, MKL
- mpi:
- buildable: False
- openmpi:
- externals:
- - spec: openmpi@4.0.5%gcc@9.3.0
- prefix: /software7/depot/openmpi-4.0.5
-
-### All other external packages listed alphabetically
autoconf:
externals:
- spec: autoconf@2.69
@@ -23,24 +9,34 @@ packages:
prefix: /usr
binutils:
externals:
- - spec: binutils@2.30.117
+ - spec: binutils@2.40.21~gold~headers
+ prefix: /opt/rh/gcc-toolset-13/root/usr
+ - spec: binutils@2.30.123~gold+headers
prefix: /usr
bison:
externals:
- spec: bison@3.0.4
prefix: /usr
+ cmake:
+ externals:
+ - spec: cmake@3.26.5
+ prefix: /usr
coreutils:
externals:
- spec: coreutils@8.30
prefix: /usr
- curl:
+ cvs:
externals:
- - spec: curl@7.61.1
+ - spec: cvs@1.11.23
prefix: /usr
diffutils:
externals:
- spec: diffutils@3.6
prefix: /usr
+ doxygen:
+ externals:
+ - spec: doxygen@1.8.14+graphviz~mscgen
+ prefix: /usr
findutils:
externals:
- spec: findutils@4.6.0
@@ -53,14 +49,18 @@ packages:
externals:
- spec: gawk@4.2.1
prefix: /usr
+ gettext:
+ externals:
+ - spec: gettext@0.19.8.1
+ prefix: /usr
git:
externals:
- - spec: git@2.39.2+tcltk
- prefix: /software8/depot/git-2.39.2
+ - spec: git@2.43.5+tcltk
+ prefix: /usr
git-lfs:
externals:
- - spec: git-lfs@3.3.0
- prefix: /software8/depot/git-2.39.2
+ - spec: git-lfs@3.4.1
+ prefix: /usr
gmake:
externals:
- spec: gmake@4.2.1
@@ -81,20 +81,23 @@ packages:
externals:
- spec: openssh@8.0p1
prefix: /usr
- # Don't use external openssl, too old
- #openssl:
- # externals:
- # - spec: openssl@1.1.1k
- # prefix: /usr
- # Can't use with py-xnrl
- #perl:
- # externals:
- # - spec: perl@5.26.3~cpanm+shared+threads
- # prefix: /usr
+ perl:
+ externals:
+ - spec: perl@5.26.3~cpanm+opcode+open+shared+threads
+ prefix: /usr
pkgconf:
externals:
- spec: pkgconf@1.4.2
prefix: /usr
+ qt:
+ buildable: False
+ externals:
+ - spec: qt@5.15.3
+ prefix: /usr/lib64/qt5
+ sed:
+ externals:
+ - spec: sed@4.5
+ prefix: /usr
subversion:
externals:
- spec: subversion@1.10.2
@@ -107,7 +110,15 @@ packages:
externals:
- spec: texinfo@6.5
prefix: /usr
+ texlive:
+ externals:
+ - spec: texlive@2018
+ prefix: /usr
wget:
externals:
- spec: wget@1.19.5
prefix: /usr
+ zlib:
+ externals:
+ - spec: zlib@1.2.11
+ prefix: /usr
diff --git a/configs/sites/tier1/navy-aws/packages_gcc.yaml b/configs/sites/tier1/navy-aws/packages_gcc.yaml
new file mode 100644
index 000000000..b8f1b3c48
--- /dev/null
+++ b/configs/sites/tier1/navy-aws/packages_gcc.yaml
@@ -0,0 +1,15 @@
+packages:
+ all:
+ compiler:: [gcc@13.3.1]
+ providers:
+ mpi:: [openmpi@5.0.5]
+ mpi:
+ buildable: False
+ openmpi:
+ buildable: False
+ externals:
+ - spec: openmpi@5.0.5%gcc@13.3.1 fabrics=auto ~gpfs ~internal-hwloc ~internal-libevent
+ ~internal-pmix +lustre +openshmem schedulers=slurm +romio romio-filesystem=lustre
+ modules:
+ - libfabric-aws/1.22.0amzn2.0
+ - openmpi5/5.0.5
diff --git a/configs/sites/tier1/noaa-aws/packages_intel.yaml b/configs/sites/tier1/noaa-aws/packages_intel.yaml
index 1ffe7cf2f..5cc0e66d7 100644
--- a/configs/sites/tier1/noaa-aws/packages_intel.yaml
+++ b/configs/sites/tier1/noaa-aws/packages_intel.yaml
@@ -11,7 +11,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0
+ - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0 +classic-names
prefix: /apps/oneapi
modules:
- impi/2023.2.0
diff --git a/configs/sites/tier1/noaa-azure/packages_intel.yaml b/configs/sites/tier1/noaa-azure/packages_intel.yaml
index 1ffe7cf2f..5cc0e66d7 100644
--- a/configs/sites/tier1/noaa-azure/packages_intel.yaml
+++ b/configs/sites/tier1/noaa-azure/packages_intel.yaml
@@ -11,7 +11,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0
+ - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0 +classic-names
prefix: /apps/oneapi
modules:
- impi/2023.2.0
diff --git a/configs/sites/tier1/noaa-gcloud/packages_intel.yaml b/configs/sites/tier1/noaa-gcloud/packages_intel.yaml
index 1ffe7cf2f..5cc0e66d7 100644
--- a/configs/sites/tier1/noaa-gcloud/packages_intel.yaml
+++ b/configs/sites/tier1/noaa-gcloud/packages_intel.yaml
@@ -11,7 +11,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0
+ - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0 +classic-names
prefix: /apps/oneapi
modules:
- impi/2023.2.0
diff --git a/configs/sites/tier1/orion/packages_intel.yaml b/configs/sites/tier1/orion/packages_intel.yaml
index eb4a22dc7..b58287ac3 100644
--- a/configs/sites/tier1/orion/packages_intel.yaml
+++ b/configs/sites/tier1/orion/packages_intel.yaml
@@ -11,7 +11,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.9.0%intel@2021.9.0
+ - spec: intel-oneapi-mpi@2021.9.0%intel@2021.9.0 +classic-names
modules:
- intel-oneapi-mpi/2021.9.0
intel-oneapi-mkl:
diff --git a/configs/sites/tier1/s4/packages.yaml b/configs/sites/tier1/s4/packages.yaml
index a2457222e..ed2478503 100644
--- a/configs/sites/tier1/s4/packages.yaml
+++ b/configs/sites/tier1/s4/packages.yaml
@@ -13,7 +13,7 @@ packages:
buildable: False
intel-oneapi-mpi:
externals:
- - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0
+ - spec: intel-oneapi-mpi@2021.10.0%intel@2021.10.0 +classic-names
prefix: /opt/intel/oneapi/2023.2
modules:
- intel/2023.2
diff --git a/configs/sites/tier1/sandy/compilers.yaml b/configs/sites/tier1/sandy/compilers.yaml
deleted file mode 100644
index b86ff0a33..000000000
--- a/configs/sites/tier1/sandy/compilers.yaml
+++ /dev/null
@@ -1,14 +0,0 @@
-compilers:
-- compiler:
- spec: gcc@=9.3.0
- paths:
- cc: /software/depot/gcc-9.3.0/bin/gcc
- cxx: /software/depot/gcc-9.3.0/bin/g++
- f77: /software/depot/gcc-9.3.0/bin/gfortran
- fc: /software/depot/gcc-9.3.0/bin/gfortran
- flags: {}
- operating_system: centos7
- target: x86_64
- modules: []
- environment: {}
- extra_rpaths: []
diff --git a/configs/sites/tier1/sandy/modules.yaml b/configs/sites/tier1/sandy/modules.yaml
deleted file mode 100644
index aeb254c2e..000000000
--- a/configs/sites/tier1/sandy/modules.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-modules:
- default:
- enable::
- - tcl
- tcl:
- include:
- # List of packages for which we need modules that are blacklisted by default
- - python
diff --git a/configs/sites/tier2/blackpearl/config.yaml b/configs/sites/tier2/blackpearl/config.yaml
index 77a2e80ad..0eb7669fb 100644
--- a/configs/sites/tier2/blackpearl/config.yaml
+++ b/configs/sites/tier2/blackpearl/config.yaml
@@ -1,2 +1,2 @@
config:
- build_jobs: 2
+ build_jobs: 1
diff --git a/configs/sites/tier2/blackpearl/packages_gcc.yaml b/configs/sites/tier2/blackpearl/packages_gcc.yaml
index a389fbcfc..730601342 100644
--- a/configs/sites/tier2/blackpearl/packages_gcc.yaml
+++ b/configs/sites/tier2/blackpearl/packages_gcc.yaml
@@ -2,4 +2,4 @@ packages:
all:
compiler:: [gcc@13.3.0]
providers:
- mpi:: [openmpi@5.0.3]
+ mpi:: [openmpi@5.0.5]
diff --git a/configs/sites/tier2/bounty/packages_gcc.yaml b/configs/sites/tier2/bounty/packages_gcc.yaml
index a91408366..0ddd36693 100644
--- a/configs/sites/tier2/bounty/packages_gcc.yaml
+++ b/configs/sites/tier2/bounty/packages_gcc.yaml
@@ -2,4 +2,4 @@ packages:
all:
compiler:: [gcc@12.3.0]
providers:
- mpi:: [openmpi@5.0.3]
+ mpi:: [openmpi@5.0.5]
diff --git a/configs/templates/nco/spack.yaml b/configs/templates/nco/spack.yaml
index 12787de13..e1263204e 100644
--- a/configs/templates/nco/spack.yaml
+++ b/configs/templates/nco/spack.yaml
@@ -35,7 +35,7 @@ spack:
- g2
- g2c
- g2tmpl
- - gdal
+ - gdal@3.8.5
- geos
- gfsio
- gftl-shared
diff --git a/configs/templates/neptune-dev/spack.yaml b/configs/templates/neptune-dev/spack.yaml
index 5f3332f79..9a9e3dfec 100644
--- a/configs/templates/neptune-dev/spack.yaml
+++ b/configs/templates/neptune-dev/spack.yaml
@@ -8,7 +8,8 @@ spack:
definitions:
- compilers: ['%aocc', '%apple-clang', '%gcc', '%intel', '%oneapi']
- packages:
- - neptune-env +espc +python +xnrl ^esmf@8.7.0b11 snapshot=b11
+ - neptune-env +espc ^esmf@8.7.0
+ - neptune-python-env +xnrl ^esmf@8.7.0
specs:
- matrix:
diff --git a/configs/templates/skylab-dev/spack.yaml b/configs/templates/skylab-dev/spack.yaml
index 1ba272a63..be901ec7e 100644
--- a/configs/templates/skylab-dev/spack.yaml
+++ b/configs/templates/skylab-dev/spack.yaml
@@ -14,10 +14,11 @@ spack:
- jedi-fv3-env
- jedi-geos-env ^esmf@=8.6.1
- jedi-mpas-env
- - jedi-neptune-env ^esmf@=8.7.0b11 snapshot=b11
+ - jedi-neptune-env ^esmf@=8.7.0
- jedi-ufs-env ^esmf@=8.6.1
- jedi-um-env
- - neptune-env ~espc +python ~xnrl ^esmf@=8.7.0b11 snapshot=b11
+ - neptune-env ^esmf@=8.7.0
+ - neptune-python-env ^esmf@=8.7.0
- soca-env
# Various crtm tags (list all to avoid duplicate packages)
@@ -26,7 +27,7 @@ spack:
# Various esmf tags (list all to avoid duplicate packages)
- esmf@=8.6.1 snapshot=none
- - esmf@=8.7.0b11 snapshot=b11
+ - esmf@=8.7.0 snapshot=none
specs:
- matrix:
diff --git a/configs/templates/unified-dev/spack.yaml b/configs/templates/unified-dev/spack.yaml
index a05ed4a75..744789778 100644
--- a/configs/templates/unified-dev/spack.yaml
+++ b/configs/templates/unified-dev/spack.yaml
@@ -13,15 +13,16 @@ spack:
- geos-gcm-env ^esmf@=8.6.1
- global-workflow-env ^esmf@=8.6.1
- gmao-swell-env
- - gsi-env ^esmf@=8.6.1
+ - gsi-env
- jedi-fv3-env
- jedi-geos-env ^esmf@=8.6.1
- jedi-mpas-env
- - jedi-neptune-env ^esmf@=8.7.0b11 snapshot=b11
+ - jedi-neptune-env ^esmf@=8.7.0
- jedi-tools-env
- jedi-ufs-env ^esmf@=8.6.1
- jedi-um-env
- - neptune-env ~espc +python ~xnrl ^esmf@=8.7.0b11 snapshot=b11
+ - neptune-env ^esmf@=8.7.0
+ - neptune-python-env ^esmf@=8.7.0
- soca-env
- ufs-srw-app-env ^esmf@=8.6.1
- ufs-weather-model-env ^esmf@=8.6.1
@@ -32,7 +33,7 @@ spack:
# Various esmf tags (list all to avoid duplicate packages)
- esmf@=8.6.1 snapshot=none
- - esmf@=8.7.0b11 snapshot=b11
+ - esmf@=8.7.0 snapshot=none
# MADIS for WCOSS2 decoders.
- madis@4.5
diff --git a/doc/CMakeLists.txt b/doc/CMakeLists.txt
index 83acb6fc6..0d21c6cf6 100644
--- a/doc/CMakeLists.txt
+++ b/doc/CMakeLists.txt
@@ -1,15 +1,9 @@
cmake_minimum_required(VERSION 3.18 FATAL_ERROR)
-project(spack-stack VERSION 1.3.1)
+project(spack-stack VERSION 1.9.0)
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMakeModules/Modules")
-option(SPHINX_OUTPUT_HTML "Build HTML documentation" ON)
-option(SPHINX_OUTPUT_LATEX "Build LaTeX (PDF) documentation" OFF)
-
-message(STATUS "SPHINX_OUTPUT_HTML: ${SPHINX_OUTPUT_HTML}")
-message(STATUS "SPHINX_OUTPUT_LATEX: ${SPHINX_OUTPUT_LATEX}")
-
find_package(Sphinx REQUIRED)
# configured documentation tools and intermediate build results
@@ -20,51 +14,17 @@ set(SPHINX_CACHE_DIR "${CMAKE_CURRENT_BINARY_DIR}/_doctrees")
# HTML output directory
set(SPHINX_HTML_DIR "${CMAKE_CURRENT_BINARY_DIR}/html")
-
-# LaTeX output directory
-set(SPHINX_PDF_DIR "${CMAKE_CURRENT_BINARY_DIR}/pdf")
configure_file(
"${CMAKE_CURRENT_SOURCE_DIR}/source/conf.py"
"${BINARY_BUILD_DIR}/conf.py"
@ONLY)
-if(SPHINX_OUTPUT_HTML)
- add_custom_target(spack-stack-doc-html ALL
- ${SPHINX_EXECUTABLE}
- -q -b html
- -c "${BINARY_BUILD_DIR}"
- -d "${SPHINX_CACHE_DIR}"
- "${CMAKE_CURRENT_SOURCE_DIR}/source"
- "${SPHINX_HTML_DIR}"
- COMMENT "Building HTML documentation with Sphinx")
-endif()
-
-if(SPHINX_OUTPUT_LATEX)
- find_package(LATEX COMPONENTS PDFLATEX REQUIRED)
- set(PDF_OUTPUT_FILE "${SPHINX_PDF_DIR}/spack-stack.pdf")
- # Create LaTeX input file with Sphinx
- add_custom_command(OUTPUT "${SPHINX_PDF_DIR}"
- COMMAND "${SPHINX_EXECUTABLE}" -q -b latex
- -c "${BINARY_BUILD_DIR}"
- -d "${SPHINX_CACHE_DIR}"
- "${CMAKE_CURRENT_SOURCE_DIR}/source"
- "${SPHINX_PDF_DIR}"
- COMMENT "Running Sphinx to generate documentation (LaTeX)"
- VERBATIM
- )
- # Create PDF output file with pdflatex
- add_custom_command(OUTPUT "${PDF_OUTPUT_FILE}"
- COMMAND "${CMAKE_MAKE_PROGRAM}" all-pdf
- WORKING_DIRECTORY "${SPHINX_PDF_DIR}"
- DEPENDS "${SPHINX_PDF_DIR}"
- COMMENT "Generating PDF version of documentation"
- VERBATIM
- )
- #
- add_custom_target(spack-stack-doc-pdf ALL
- COMMENT "Generating PDF version of documentation ..."
- SOURCES "${PDF_OUTPUT_FILE}"
- VERBATIM
- )
-endif()
+add_custom_target(spack-stack-doc-html ALL
+ ${SPHINX_EXECUTABLE}
+ -q -b html
+ -c "${BINARY_BUILD_DIR}"
+ -d "${SPHINX_CACHE_DIR}"
+ "${CMAKE_CURRENT_SOURCE_DIR}/source"
+ "${SPHINX_HTML_DIR}"
+ COMMENT "Building HTML documentation with Sphinx")
diff --git a/doc/modulefile_templates/cmake b/doc/modulefile_templates/cmake
deleted file mode 100644
index 2478d236b..000000000
--- a/doc/modulefile_templates/cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-#%Module1.0
-
-module-whatis "Provides a cmake-3.27.2 installation for use with spack."
-
-conflict cmake
-
-proc ModulesHelp { } {
-puts stderr "Provides a cmake-3.27.2 installation for use with spack."
-}
-
-# Set this value
-set CMAKE_PATH "/contrib/spack-stack/cmake-3.27.2"
-
-prepend-path PATH "${CMAKE_PATH}/bin"
-prepend-path MANPATH "${CMAKE_PATH}/man"
diff --git a/doc/modulefile_templates/ecflow b/doc/modulefile_templates/ecflow
deleted file mode 100644
index 037421120..000000000
--- a/doc/modulefile_templates/ecflow
+++ /dev/null
@@ -1,28 +0,0 @@
-#%Module1.0
-
-module-whatis "Provides an ecflow-5.8.4 server+ui installation for use with spack."
-
-conflict ecflow
-
-proc ModulesHelp { } {
-puts stderr "Provides an ecflow-5.8.4 server+ui installation for use with spack."
-}
-
-# If it is necessary to load a module for Qt (e.g. when not installed
-# in the OS system paths), use the following logic; otherwise remove.
-if { [ module-info mode load ] && ![ is-loaded qt/5.15.2 ] } {
- module load qt/5.15.2
-}
-
-# Set this value
-set ECFLOW_PATH "/discover/swdev/jcsda/spack-stack/ecflow-5.8.4"
-
-setenv ecflow_ROOT "${ECFLOW_PATH}"
-prepend-path PATH "${ECFLOW_PATH}/bin"
-prepend-path LD_LIBRARY_PATH "${ECFLOW_PATH}/lib"
-prepend-path LD_LIBRARY_PATH "${ECFLOW_PATH}/lib64"
-prepend-path LIBRARY_PATH "${ECFLOW_PATH}/lib"
-prepend-path LIBRARY_PATH "${ECFLOW_PATH}/lib64"
-prepend-path CPATH "${ECFLOW_PATH}/include"
-prepend-path CMAKE_PREFIX_PATH "${ECFLOW_PATH}"
-prepend-path PYTHONPATH "${ECFLOW_PATH}/lib/python3.9/site-packages"
\ No newline at end of file
diff --git a/doc/modulefile_templates/miniconda b/doc/modulefile_templates/miniconda
deleted file mode 100644
index 24e412aa4..000000000
--- a/doc/modulefile_templates/miniconda
+++ /dev/null
@@ -1,22 +0,0 @@
-#%Module1.0
-
-module-whatis "Provides a basic python-3.9.12 installation based on miniconda for use with spack."
-
-conflict python,anaconda,conda,miniconda,intelpython
-#family python
-
-proc ModulesHelp { } {
-puts stderr "Provides a basic python-3.9.12 installation based on miniconda for use with spack."
-}
-
-# Set this value
-set MINICONDA_PATH "/glade/work/jedipara/cheyenne/spack-stack/miniconda-3.9.12"
-
-prepend-path PATH "${MINICONDA_PATH}/bin"
-prepend-path MANPATH "${MINICONDA_PATH}/share/man"
-prepend-path LD_LIBRARY_PATH "${MINICONDA_PATH}/lib"
-prepend-path LIBRARY_PATH "${MINICONDA_PATH}/lib"
-prepend-path CPATH "${MINICONDA_PATH}/include"
-prepend-path CMAKE_PREFIX_PATH "${MINICONDA_PATH}"
-prepend-path PYTHONPATH "${MINICONDA_PATH}/lib/python3.9/site-packages"
-
diff --git a/doc/modulefile_templates/mvapich2 b/doc/modulefile_templates/mvapich2
deleted file mode 100644
index 042c16358..000000000
--- a/doc/modulefile_templates/mvapich2
+++ /dev/null
@@ -1,35 +0,0 @@
-#%Module1.0
-
-module-whatis "Provides an mvapich2-2.3.7 installation for use with spack and gcc-13.3.1."
-
-conflict openmpi
-conflict mvapich2
-conflict mpi
-conflict intel-mpi
-conflict intel-oneapi-mpi
-
-proc ModulesHelp { } {
-puts stderr "Provides an mvapich2-2.3.7 installation for use with spack and gcc-13.3.1."
-}
-
-if { [ module-info mode load ] && ![ is-loaded slurm/22.05.8 ] } {
- module load slurm/22.05.8
-}
-#if { [ module-info mode load ] && ![ is-loaded ucx/1.13.1 ] } {
-# module load ucx/1.13.1
-#`}
-
-# Set this value
-set MPICH_PATH "/work/noaa/epic/role-epic/spack-stack/hercules/mvapich2-2.3.7/gcc-11.3.1"
-
-prepend-path PATH "${MPICH_PATH}/bin"
-prepend-path LD_LIBRARY_PATH "${MPICH_PATH}/lib"
-prepend-path LIBRARY_PATH "${MPICH_PATH}/lib"
-prepend-path CPATH "${MPICH_PATH}/include"
-prepend-path CMAKE_PREFIX_PATH "${MPICH_PATH}"
-prepend-path MANPATH "${MPICH_PATH}/share/man"
-
-# Settings specific for Hercules
-setenv MPI_ROOT ${MPICH_PATH}
-setenv SLURM_MPI_TYPE "pmi2"
-setenv MV2_HOMOGENEOUS_CLUSTER "1"
diff --git a/doc/requirements.txt b/doc/requirements.txt
index 25b341bdf..e4d7905c5 100644
--- a/doc/requirements.txt
+++ b/doc/requirements.txt
@@ -1,2 +1 @@
-sphinxcontrib-bibtex
urllib3<2
diff --git a/doc/source/KnownIssues.rst b/doc/source/KnownIssues.rst
index 2e2c2171a..6d411af8a 100644
--- a/doc/source/KnownIssues.rst
+++ b/doc/source/KnownIssues.rst
@@ -109,10 +109,6 @@ macOS
Can happen when trying to use the raster plotting scripts in ``fv3-jedi-tools``. In that case, exporting ``DYLD_LIBRARY_PATH=/usr/lib/:$DYLD_LIBRARY_PATH`` can help. If ``git`` commands fail after this, you might need to verify where ``which git`` points to (Homebrew vs module) and unload the ``git`` module.
-6. Not all versions of ``apple-clang@15.0.0`` are currently supported.
-
- If you are running macOS Sonoma 14.5 you may have a recent version of clang that does not yet build spack packages. On the console you should run ``clang --version`` and verify that the underlying clang compiler is ``clang-1500.1.0.2.5`` which is the version provided with Command Line Tools for XCode 15.1. You can downgrade your Command Line Tool packages by first running ``sudo rm -rf /Library/Developer/CommandLineTools`` then you can download and install the Command Line Tools 15.1 APK from `developer.apple.com `_. The Apple Developer site requires an iCloud login.
-
==============================
Ubuntu
==============================
diff --git a/doc/source/MaintainersSection.rst b/doc/source/MaintainersSection.rst
index 79cc8e336..985e0d6e3 100644
--- a/doc/source/MaintainersSection.rst
+++ b/doc/source/MaintainersSection.rst
@@ -29,28 +29,6 @@ Building ``git-lfs`` with spack isn't straightforward as it requires ``go-bootst
Following this "installation", create modulefile from template ``doc/modulefile_templates/git-lfs``.
-.. _MaintainersSection_Miniconda:
-
-------------------------------
-Miniconda (legacy)
-------------------------------
-
-miniconda can be used to provide a basic version of Python that spack-stack uses to support its Python packages. This is not recommended on configurable systems (user workstations and laptops using GNU compiler) where Python gets installed by spack. But any system using Intel compilers with spack-stack will need an external Python to build ecflow with Python bindings (because ecflow requires a boost serialization function that does **not** work with Intel, a known yet ignored bug), and then both Python and ecflow are presented to spack as external packages. Often, it is possible to use the default (OS) Python if new enough (3.9+), or a module provided by the system administrators. If none of this works, use the following instructions to install a basic Python interpreter using miniconda:
-
-The following is for the example of ``miniconda_ver="py39_4.12.0"`` (for which ``python_ver=3.9.12``) and ``platform="MacOSX-x86_64"`` or ``platform="Linux-x86_64"``
-
-.. code-block:: console
-
- cd /path/to/top-level/spack-stack/
- mkdir -p miniconda-${python_ver}/src
- cd miniconda-${python_ver}/src
- wget https://repo.anaconda.com/miniconda/Miniconda3-${miniconda_ver}-${platform}.sh
- sh Miniconda3-${miniconda_ver}-${platform}.sh -u -b -p /path/to/top-level/spack-stack/miniconda-${python_ver}
- eval "$(/path/to/top-level/spack-stack/miniconda-${python_ver}/bin/conda shell.bash hook)"
- conda install -y -c conda-forge libpython-static
-
-After the successful installation, create modulefile ``/path/to/top-level/spack-stack/modulefiles/miniconda/${python_ver}`` from template ``doc/modulefile_templates/miniconda`` and update ``MINICONDA_PATH`` and the Python version in this file.
-
.. _MaintainersSection_Qt5:
------------------------------
@@ -80,75 +58,6 @@ Sign into qt, select customized installation, choose qt@5.15.2 only (uncheck all
.. note::
If ``./qt-unified-linux-x64-online.run`` fails to start with the error ``qt.qpa.xcb: could not connect to display`` and a role account is being used, follow the procedure described in https://www.thegeekdiary.com/how-to-set-x11-forwarding-export-remote-display-for-users-who-switch-accounts-using-sudo to export the display. A possible warning ``xauth: file /ncrc/home1/role.epic/.Xauthority does not exist`` can be ignored, since this file gets created by the ``xauth`` command.
-.. _MaintainersSection_ecFlow:
-
-------------------------------
-ecFlow (with GUI and Python)
-------------------------------
-
-Building ``ecFlow`` with spack is pretty tricky, because it requires functions from the ``boost`` serialization library that do not build cleanly with the Intel classic compilers (see https://github.com/USCiLab/cereal/issues/606 for a description of the problem of Intel with json cereal). When using the Intel compilers on HPC systems, it is therefore necessary to build ``ecFlow`` with the GNU compilers, preferably the same version that is used as the C++ backend for Intel, outside of spack-stack and make it available as a module. The build of ``ecFlow`` described below links against this ``boost`` library statically, therefore it does not interfere with ``boost`` built by spack-stack for other applications. ``ecFlow`` also uses ``Python3`` and ``qt5``.
-
-.. note::
- Installing ``ecFlow`` with ``conda``, ``brew``, etc. is not recommended, since these install a number of packages as dependencies (e.g. ``numpy``, dynamically-linked ``boost``) that may interfere with the spack software stack.
-
-After loading the required modules for this system (typically the same ``gcc`` used as backend for Intel or for GNU spack-stack builds, ``cmake``, ``qt5``, ``Python3``), follow these instructions to install ecFlow with the graphical user interface (GUI) and Python3 API. See also https://confluence.ecmwf.int/display/ECFLOW/ecflow5.
-
-.. code-block:: console
-
- mkdir -p /lustre/f2/pdata/esrl/gsd/spack-stack/ecflow-5.8.4/src
- cd /lustre/f2/pdata/esrl/gsd/spack-stack/ecflow-5.8.4/src
- wget https://confluence.ecmwf.int/download/attachments/8650755/ecFlow-5.8.4-Source.tar.gz?api=v2
- wget https://boostorg.jfrog.io/artifactory/main/release/1.78.0/source/boost_1_78_0.tar.gz
- mv ecFlow-5.8.4-Source.tar.gz\?api\=v2 ecFlow-5.8.4-Source.tar.gz
- tar -xvzf boost_1_78_0.tar.gz
- tar -xvzf ecFlow-5.8.4-Source.tar.gz
- export WK=/lustre/f2/pdata/esrl/gsd/spack-stack/ecflow-5.8.4/src/ecFlow-5.8.4-Source
- export BOOST_ROOT=/lustre/f2/pdata/esrl/gsd/spack-stack/ecflow-5.8.4/src/boost_1_78_0
-
- # Build static boost (to not interfere with spack-stack boost)
- cd $BOOST_ROOT
- ./bootstrap.sh 2>&1 | tee bootstrap.log
- $WK/build_scripts/boost_build.sh 2>&1 | tee boost_build.log
-
- # Build ecFlow
- cd $WK
- mkdir build
- cd build
- cmake .. -DCMAKE_INSTALL_PREFIX=/lustre/f2/pdata/esrl/gsd/spack-stack/ecflow-5.8.4 2>&1 | tee log.cmake
- make -j4 2>&1 | tee log.make
- make install 2>&1 | tee log.install
-
-Create modulefile ``/lustre/f2/pdata/esrl/gsd/spack-stack/modulefiles/ecflow/5.8.4`` from template ``doc/modulefile_templates/ecflow`` and update ``ECFLOW_PATH`` in this file.
-
-.. note::
- For Cray systems, for example NRL's Narwhal, NOAA's Gaea C5, or NCAR's Derecho, the following modifications are necessary: After extracting the ecflow tarball, edit ``ecFlow-5.8.4-Source/build_scripts/boost_build.sh`` and remove the following lines:
-
-.. code-block:: console
-
- if [ "$PE_ENV" = INTEL ] ; then
- tool=intel
- fi
- if [ "$PE_ENV" = CRAY ] ; then
- tool=cray
- fi
-
-.. note::
- Further on Narwhal, the ``cmake`` command for ``ecbuild`` must be told to use the GNU compilers:
-
-.. code-block:: console
-
- CC=gcc CXX=g++ FC=gfortran cmake .. -DCMAKE_INSTALL_PREFIX=/path/to/ecflow/installation 2>&1 | tee log.cmake
-
-.. note::
- Further, on Gaea C5, one needs to pass the correct ``python3`` executable to the ``cmake`` command:
-
-.. code-block:: console
-
- cmake .. -DPython3_EXECUTABLE=`which python3` -DCMAKE_INSTALL_PREFIX=/path/to/ecflow/installation 2>&1 | tee log.cmake
-
-.. note::
- Finally, on Casper, Derecho, or any other system with ``gcc@12.2.0``, one needs to patch file ``ecflow-5.8.4/src/ecFlow-5.8.4-Source/ACore/src/Passwd.cpp`` by adding ``#include `` below line ``#include "Passwd.hpp"`` before running ``make``.
-
.. _MaintainersSection_MySQL:
-----------------------------------
@@ -209,6 +118,7 @@ If a mirror exists, add new packages to the mirror. Here, ``/path/to/mirror`` is
If this fails with ``git lfs`` errors, check the site config for which module to load for ``git lfs`` support. Load the module, then run the ``spack mirror add`` command, then unload the module and proceed with the installation.
+
==============================
Pre-configuring sites
==============================
@@ -219,7 +129,7 @@ Pre-configuring sites
Preface/general instructions
------------------------------
-Preconfigured sites are defined through spack configuration files in the spack-stack directory ``configs/sites``, for example ``configs/sites/orion``. All files in the site-specific subdirectory will be copied into the environment into ``envs/env-name/site``. Site-specific configurations consist of general definitions (``config.yaml``), packages (``packages.yaml``), compilers (``compilers.yaml``), modules (``modules.yaml``), mirrors (``mirrors.yaml``) etc. These configurations overwrite the common configurations that are copied from ``configs/common`` into ``envs/env-name/common``.
+Preconfigured sites are defined through spack configuration files in the spack-stack directory ``configs/sites``, for example ``configs/sites/orion``. All files in the site-specific subdirectory will be copied into the environment into ``envs/env-name/site``. Site-specific configurations consist of general definitions (``config.yaml``), packages (``packages.yaml``, ``packages_*.yaml``), compilers (``compilers.yaml``), modules (``modules.yaml``), mirrors (``mirrors.yaml``) etc. These configurations overwrite the common configurations that are copied from ``configs/common`` into ``envs/env-name/common``.
The instructions below are platform-specific tasks that only need to be done once and can be reused for new spack environments. To build new environments on preconfigured platforms, follow the instructions in :numref:`Section %s `.
@@ -229,139 +139,27 @@ Note that, for official installations of new environments on any supported platf
spack install --source --verbose
-.. _MaintainersSection_Orion:
-
-------------------------------
-MSU Orion
-------------------------------
-
-On Orion, it is necessary to change the default ``umask`` from ``0027`` to ``0022`` so that users not in the group of the role account can still see and use the software stack. This can be done by running ``umask 022`` after logging into the role account.
-
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After installing `miniconda`, and loading the following modules, follow the instructions in :numref:`Section %s `. Note that the default/system ``qt@5`` can be used on Orion.
-
-.. code-block:: console
-
- module purge
- module load python/3.9.2
- module load cmake/3.22.1
- module load gcc/10.2.0
-
-.. _MaintainersSection_Hercules:
-
-------------------------------
-MSU Hercules
-------------------------------
-
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library, using an available ``Qt5`` installation. After loading the following modules, follow the instructions in :numref:`Section %s ` to install ``ecflow`` in ``/work/noaa/epic/role-epic/spack-stack/hercules/ecflow-5.8.4``. NOTE: do NOT include the ``Qt5`` module dependency in the ``ecflow`` modulefile, as it is only needed at build time (and causes issues with zlib/tar if the depedency is kept in the modulefile).
-
-.. code-block:: console
-
- module purge
- module load qt/5.15.8
-
-openmpi
- Because of difficulties with the default ``openmpi`` on Hercules, we build ``openmpi`` outside of spack and provide it as an external package. It is necessary to load the ``gcc`` compiler module and the ``zlib`` module for consistency. The configuration options are mostly adopted from the default OpenMPI installations that were done by the system administrators using spack (many of them are default values), except that we use internal ``hwloc`` and ``pmix``. Create modulefile ``openmpi`` from template ``doc/modulefile_templates/openmpi``.
-
-.. code-block:: console
-
- ./configure \
- --enable-shared \
- --disable-silent-rules \
- --disable-builtin-atomics \
- --with-pmi=/opt/slurm \
- --enable-static \
- --enable-mpi1-compatibility \
- --without-hcoll \
- --without-psm2 \
- --without-knem \
- --without-verbs \
- --without-psm \
- --without-cma \
- --without-ucx \
- --without-mxm \
- --without-fca \
- --without-xpmem \
- --without-ofi \
- --without-cray-xpmem \
- --without-sge \
- --without-lsf \
- --without-loadleveler \
- --without-alps \
- --without-tm \
- --with-slurm \
- --disable-memchecker \
- --with-pmix=internal \
- --with-zlib=/apps/spack-managed/gcc-12.2.0/zlib-1.2.13-p3sxbyfgvvjy7jx4kizib2jwvhm4s6l4 \
- --with-hwloc=internal \
- --disable-java \
- --disable-mpi-java \
- --with-gpfs=no \
- --without-cuda \
- --enable-wrapper-rpath \
- --disable-wrapper-runpath \
- --disable-mpi-cxx \
- --disable-cxx-exceptions \
- --with-wrapper-ldflags="-Wl,-rpath,/apps/spack-managed/gcc-11.3.1/gcc-12.2.0-7cu3qahzhsxpauy4jlnsbcqmlbkxbbbo/lib/gcc/x86_64-pc-linux-gnu/12.2.0 -Wl,-rpath,/apps/spack-managed/gcc-11.3.1/gcc-12.2.0-7cu3qahzhsxpauy4jlnsbcqmlbkxbbbo/lib64" \
- --prefix=/work/noaa/epic/role-epic/spack-stack/hercules/openmpi-4.1.6/gcc-12.2.0-spack 2>&1 | tee log.config
- make VERBOSE=1 -j4
- make check
- make install
-
.. _MaintainersSection_Discover_SCU16:
------------------------------
NASA Discover SCU16
------------------------------
-On Discover SCU16, ``miniconda``, ``qt``, and ``ecflow`` need to be installed as a one-off before spack can be used. When using the GNU compiler, it is also necessary to build your own ``openmpi`` or other MPI library, which requires adapting the installation to the network hardware and ``slurm`` scheduler.
-
-miniconda
- Follow the instructions in :numref:`Section %s ` to create a basic ``miniconda`` installation and associated modulefile for working with spack. Don't forget to log off and back on to forget about the conda environment.
+On Discover SCU16, ``qt`` needs to be installed as a one-off before spack can be used. When using the GNU compiler, it is also necessary to build your own ``openmpi`` or other MPI library, which requires adapting the installation to the network hardware and ``slurm`` scheduler.
qt (qt@5)
The default ``qt@5`` in ``/usr`` is incomplete and thus insufficient for building ``ecflow``. After loading/unloading the modules as shown below, refer to
:numref:`Section %s ` to install ``qt@5.15.2`` in ``/discover/swdev/jcsda/spack-stack/scu16/qt-5.15.2`` (note: it is currently installed in ``/discover/swdev/jcsda/spack-stack/qt-5.15.2``; an upcoming large system update will require is to rebuild anyway).
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After installing `miniconda`, `qt5`, and loading the following modules, follow the instructions in :numref:`Section %s `.
-
-.. code-block:: console
-
- module purge
- module load cmake/3.28.2
- module load comp/gcc/12.1.0
- module use /discover/swdev/jcsda/spack-stack/modulefiles
- module load miniconda/3.10.13
- module load qt/5.15.2
-
.. _MaintainersSection_Discover_SCU17:
------------------------------
NASA Discover SCU17
------------------------------
-On Discover SCU17 ``ecflow`` needs to be installed as a one-off before spack can be used.
+On Discover SCU17, ``qt`` needs to be installed as a one-off before spack can be used.
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After loading the following modules, follow the instructions in :numref:`Section %s ` (cont'd below).
-
-.. code-block:: console
-
- module purge
- module load cmake/3.28.2
-
-The following workaround is required after installing ``ecflow`` and creating the modulefile: edit ``path/to/ecflow/bin/ecflow_ui`` and change the last few lines to (i.e. prepend the ``LD_PRELOAD`` command):
-
-.. code-block:: console
-
- if [ $ECFLOWUI_BT != "no" ]
- then
- LD_PRELOAD=/usr/lib64/libstdc++.so.6 catchsegv ${ECFLOWUI_USER_START_CMD} "$exe"
- else
- LD_PRELOAD=/usr/lib64/libstdc++.so.6 ${ECFLOWUI_USER_START_CMD} "$exe"
- fi
+**These instructions are missing. The current ``qt`` used in the SCU17 site config does not work (/usr/local/other/xpdf/4.04/Deps).**
.. _MaintainersSection_Narwhal:
@@ -369,7 +167,7 @@ The following workaround is required after installing ``ecflow`` and creating th
NAVY HPCMP Narwhal
------------------------------
-On Narwhal, ``git-lfs``, ``qt``, and ``ecflow`` need to be installed as a one-off before spack can be used. Also, temporarily it is necessary to install ``node.js`` as an external package to work around build errors for ``py-jupyter-server`` (see https://github.com/JCSDA/spack-stack/issues/928 and https://github.com/spack/spack/issues/41899).
+On Narwhal, ``git-lfs`` and ``qt`` need to be installed as a one-off before spack can be used. Also, temporarily it is necessary to install ``node.js`` as an external package to work around build errors for ``py-jupyter-server`` (see https://github.com/JCSDA/spack-stack/issues/928 and https://github.com/spack/spack/issues/41899).
git-lfs
The following instructions install ``git-lfs`` in ``/p/app/projects/NEPTUNE/spack-stack/git-lfs-2.10.0``. Version 2.10.0 is the default version for Narwhal. First, download the ``git-lfs`` RPM on a system with full internet access (e.g., Derecho) using ``wget https://download.opensuse.org/repositories/openSUSE:/Leap:/15.2/standard/x86_64/git-lfs-2.10.0-lp152.1.2.x86_64.rpm`` and copy this file to ``/p/app/projects/NEPTUNE/spack-stack/git-lfs-2.10.0/src``. Then switch to Narwhal and run the following commands.
@@ -399,24 +197,6 @@ qt (qt@5)
module load gcc/10.3.0
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After installing `qt5`, and loading the following modules, follow the instructions in :numref:`Section %s ` to install ``ecflow`` in ``/p/app/projects/NEPTUNE/spack-stack/ecflow-5.8.4``. Ensure to follow the extra instructions in that section for Narwhal.
-
-.. code-block:: console
-
- module unload PrgEnv-cray
- module load PrgEnv-intel/8.1.0
- module unload intel
-
- module unload cray-python
- module load cray-python/3.9.7.1
- module unload cray-libsci
- module load cray-libsci/22.08.1.1
-
- module load gcc/10.3.0
- module use /p/app/projects/NEPTUNE/spack-stack/modulefiles
- module load qt/5.15.2
-
node.js
``node.js`` is difficult to install via ``spack``, but is needed to install certain Python packages. The complication is that when using a newer ``gcc`` compiler (either directly or as backend for ``icc`` etc.), the OS ``node.js`` errors out with unresolved symbols in the ``libstdc++`` library. Therefore, we need to install ``node.js`` with ``gcc@10.3.0`` loaded, and create modulefile ``node.js/20.10.0`` from template ``modulefiles/node.js``.
@@ -437,42 +217,6 @@ node.js
make 2>&1 | tee log.make
make install 2>&1 | tee log.install
-.. _MaintainersSection_Nautilus:
-
-------------------------------
-NAVY HPCMP Nautilus
-------------------------------
-
-On Nautilus, ``ecflow`` must be installed as a one-off before spack can be used.
-
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After loading the following modules, follow the instructions in :numref:`Section %s ` to install ``ecflow`` in ``/p/app/projects/NEPTUNE/spack-stack/ecflow-5.8.4``.
-
-.. code-block:: console
-
- module purge
-
- module load slurm
- module load amd/aocc/4.0.0
- module load amd/aocl/aocc/4.0
-
-.. _MaintainersSection_Casper:
-
-------------------------------
-NCAR-Wyoming Casper
-------------------------------
-
-On Casper, there are problems with newer versions of the Intel compiler/MPI library when trying to run MPI jobs with just one task (``mpiexec -np 1``) - for JEDI, job hangs forever in a particular MPI communication call in oops. This is why an older version Intel 19 is used here.
-
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After loading the following modules, follow the instructions in :numref:`Section %s `.
-
-.. code-block:: console
-
- module purge
- export LMOD_TMOD_FIND_FIRST=yes
- module load gnu/12.2.0
-
.. _MaintainersSection_Derecho:
------------------------------
@@ -485,31 +229,13 @@ libfabric (temporary)
cray-pals (temporary)
Until CISL fixes its unusual way of setting up Cray module environments, it is necessary to create a cray-pals (parallel application launcher) module to be able to find ``mpirun`` etc. Create directory ``/glade/work/epicufsrt/contrib/spack-stack/derecho/cray-pals`` and copy file ``/opt/cray/pe/lmod/modulefiles/core/cray-pals/1.2.11.lua`` into this directory.
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After loading the following modules, follow the instructions in :numref:`Section %s ` to install ``ecflow``. Be sure to follow the extra instructions for Derecho in that section.
-
-.. code-block:: console
-
- module purge
- export LMOD_TMOD_FIND_FIRST=yes
- module load gcc/12.2.0
- module load cmake/3.26.3
-
-.. _MaintainersSection_WCOSS2:
-
-------------------------------
-NOAA NCO WCOSS2
-------------------------------
-
-**WORK IN PROGRESS**
-
.. _MaintainersSection_Parallel_Works:
----------------------------------------
NOAA Parallel Works (AWS, Azure, Gcloud)
----------------------------------------
-See ``configs/sites/noaa-aws/README.md``. These instructions are identical for all three vendors.
+See ``configs/sites/noaa-{aws,azure,gcloud}/README.md``.
.. _MaintainersSection_GaeaC5:
@@ -517,7 +243,7 @@ See ``configs/sites/noaa-aws/README.md``. These instructions are identical for a
NOAA RDHPCS Gaea C5
------------------------------
-On Gaea C5, ``miniconda``, ``qt``, and ``ecflow`` need to be installed as a one-off before spack can be used.
+On Gaea C5, ``qt`` needs to be installed as a one-off before spack can be used.
qt (qt@5)
The default ``qt@5`` in ``/usr`` is incomplete and thus insufficient for building ``ecflow``. After loading/unloading the modules as shown below, refer to :numref:`Section %s ` to install ``qt@5.15.2`` in ``/ncrc/proj/epic/spack-stack/qt-5.15.2``. :numref:`Section %s ` describes how to export the X windows environment in order to install ``qt@5`` using the role account.
@@ -528,45 +254,12 @@ qt (qt@5)
module load gcc/10.3.0
module load PrgEnv-gnu/8.3.3
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After installing `qt5` and loading the following modules, follow the instructions in :numref:`Section %s `. Because of the dependency on ``miniconda``, that module must be loaded automatically in the ``ecflow`` module (similar to ``qt@5.15.2-c5``). Ensure to follow the extra instructions in that section for Gaea C5 in ``/ncrc/proj/epic/spack-stack/ecflow-5.8.4``.
-
- Ensure to follow the extra instructions in that section for Gaea.
-
-.. code-block:: console
-
- module load PrgEnv-gnu/8.3.3
- module use /ncrc/proj/epic/spack-stack/modulefiles/
- module load qt/5.15.2
- module load python/3.9.12
- module load cmake/3.23.1
-
.. _MaintainersSection_Hera:
------------------------------
NOAA RDHPCS Hera
------------------------------
-On Hera, ``miniconda`` must be installed as a one-off before spack can be used. When using the GNU compiler, it is also necessary to build your own ``openmpi`` or other MPI library.
-
-miniconda
- Follow the instructions in :numref:`Section %s ` to create a basic ``miniconda`` installation and associated modulefile for working with spack. Don't forget to log off and back on to forget about the conda environment.
-
-openmpi
- It is easier to build and test ``openmpi`` manually and use it as an external package, instead of building it as part of spack-stack. These instructions were used to build the ``openmpi@4.1.5`` MPI library with ``gcc@9.2.0`` as referenced in the Hera site config. After the installation, create modulefile `openmpi/4.1.5` using the template ``doc/modulefile_templates/openmpi``. Note the site-specific module settings at the end of the template, this will likely be different for other HPCs.
-
-.. code-block:: console
-
- module purge
- module load gnu/9.2.0
- ./configure \
- --prefix=/scratch1/NCEPDEV/jcsda/jedipara/spack-stack/openmpi-4.1.5 \
- --with-pmi=/apps/slurm/default \
- --with-lustre
- make VERBOSE=1 -j4
- make check
- make install
-
Hera sits behind the NOAA firewall and doesn't have access to all packages on the web. It is therefore necessary to create a spack mirror on another platform. This can be done as described in section :numref:`Section %s ` for air-gapped systems.
.. _MaintainersSection_Jet:
@@ -575,42 +268,9 @@ Hera sits behind the NOAA firewall and doesn't have access to all packages on th
NOAA RDHPCS Jet
------------------------------
-Note that the ``target`` architecture for Jet must be set to ``core2`` to satisfy differences between the various Jet partitions and ensure that installations run on the front-end nodes (xjet-like) will function on the other partitions.
-
-miniconda
- Follow the instructions in :numref:`Section %s ` to create a basic ``miniconda`` installation and associated modulefile for working with spack. Don't forget to log off and back on to forget about the conda environment.
-
-.. code-block:: console
-
- module use /lfs4/HFIP/hfv3gfs/spack-stack/modulefiles
- module load miniconda/3.9.12
- # Need a newer gcc compiler than the default OS compiler gcc-4.8.5
- module load gnu/9.2.0
-
-.. _MaintainersSection_S4:
-
-------------------------------
-UW (Univ. of Wisconsin) S4
-------------------------------
-
-gnu (module only)
- The ``gnu/9.3.0`` module provided by the system administrators is broken. To create a usable version, turn ``/data/prod/hpc-stack/modulefiles/core/gnu/9.3.0.lua`` into a simple environment module (``tcl``) in ``/data/prod/jedi/spack-stack/modulefiles/gnu``.
-
-mpich (module only)
- The ``mpich/4.0.1`` module provided by the system administrators is broken. To create a usable version, turn ``/data/prod/hpc-stack/modulefiles/compiler/gnu/9.3.0/mpich/4.0.1.lua`` into a simple environment module (``tcl``) in ``/data/prod/jedi/spack-stack/modulefiles/mpich``.
+On Jet, the ``target`` architecture must be set to ``core2`` to satisfy differences between the various Jet partitions and ensure that installations run on the front-end nodes (xjet-like) will function on the other partitions.
-miniconda
- Follow the instructions in :numref:`Section %s ` to create a basic ``miniconda`` installation and associated modulefile for working with spack. Don't forget to log off and back on to forget about the conda environment.
-
-ecflow
- ``ecFlow`` must be built manually using the GNU compilers and linked against a static ``boost`` library. After installing `miniconda`, and loading the following modules, follow the instructions in :numref:`Section %s `.
-
-.. code-block:: console
-
- module purge
- module use /data/prod/jedi/spack-stack/modulefiles
- module load miniconda/3.9.12
- module load gcc/9.3.0
+Like Hera, Jet sits behind the NOAA firewall and doesn't have access to all packages on the web. It is therefore necessary to create a spack mirror on another platform. This can be done as described in section :numref:`Section %s ` for air-gapped systems.
.. _MaintainersSection_AWS_Pcluster_Ubuntu:
@@ -620,6 +280,7 @@ Amazon Web Services Parallelcluster Ubuntu 20.04
See ``configs/sites/aws-pcluster/README.md``.
+
.. _MaintainersSection_Testing_New_Packages:
.. _MaintainersSection_spack_mirrors:
diff --git a/doc/source/NewSiteConfigs.rst b/doc/source/NewSiteConfigs.rst
index 65b59b6b0..04ec87431 100644
--- a/doc/source/NewSiteConfigs.rst
+++ b/doc/source/NewSiteConfigs.rst
@@ -32,8 +32,7 @@ The instructions below are for GNU (`gcc`), since this is the easiest and best s
We have noted problems on some - not all - platforms with ``intel@2021.5.0`` when we switched from ``zlib`` to ``zlib-ng`` in spack-stack-1.7.0. These issues went away when using a different version of the compiler (anything between 2021.3.0 and 2021.11.0). It is therefore recommended to avoid using ``intel@2021.5.0`` unless it is the only option.
.. [#fn2]
- Note that ``apple-clang@14.x`` compiler versions are fully supported, and ``apple-clang@15.0.0`` will work but requires the :ref:`workaround noted below`.
- Also, when using ``apple-clang@15.0.0`` you must use Command Line Tools version 15.1, and the Command Line Tools versions 15.3 and newer are not yet supported.
+ Note that ``apple-clang@14.x`` and ``apple-clang@15.x`` compiler versions are fully supported, and when using ``apple-clang@15.x`` the :ref:`workaround noted below` is required.
.. [#fn3]
Support for Nvidia compilers is experimental and limited to a subset of packages. Please refer to :numref:`Section %s ` below.
@@ -175,7 +174,8 @@ These instructions are meant to be a reference that users can follow to set up t
Make sure you upgrade cmake in homebrew.
.. code-block:: console
- brew upgrade cmake
+
+ brew upgrade cmake
4. Configure your terminal to use the homebrew installed bash
@@ -284,8 +284,8 @@ Remember to activate the ``lua`` module environment and have MacTeX in your sear
.. _apple-clang-15-workaround:
.. note::
- When using apple-clang@15.0.0 (or newer) compilers, you need to manually add the following ldflags spec in the `site/compilers.yaml` file.
- There are known issues with new features in the Apple linker/loader that comes with the 15.0.0 compiler set, and this change tells the linker/loader to use its legacy features which work fine.
+ When using apple-clang@15.x (or newer) compilers, you need to manually add the following ldflags spec in the `site/compilers.yaml` file.
+ There are known issues with new features in the Apple linker/loader that comes with the 15.x compiler set, and this change tells the linker/loader to use its legacy features which work fine.
.. code-block:: yaml
:emphasize-lines: 9,10
@@ -671,6 +671,7 @@ With all of that in mind, the following instructions were used on an Amazon Web
3. Load the correct module shipped with ``nvhpc-24-3``. Note that this is only required for ``spack`` to detect the compiler and ``openmpi`` library during the environment configuration below. It is not required when using the new environment to compile code.
.. code-block:: console
+
module purge
module use /opt/nvidia/hpc_sdk/modulefiles
module load nvhpc-openmpi3/24.3
diff --git a/doc/source/PreConfiguredSites.rst b/doc/source/PreConfiguredSites.rst
index b62804dd6..c4962ecbc 100644
--- a/doc/source/PreConfiguredSites.rst
+++ b/doc/source/PreConfiguredSites.rst
@@ -43,9 +43,9 @@ Pre-configured sites (tier 1)
+=====================+=======================+====================+========================================================+=================+
| **HPC platforms** |
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
-| | Hercules | GCC, Intel | ``/work/noaa/epic/role-epic/spack-stack/hercules/`` | EPIC / JCSDA |
+| | Hercules | GCC, Intel | ``/apps/contrib/spack-stack/`` | EPIC / JCSDA |
| MSU +-----------------------+--------------------+--------------------------------------------------------+-----------------+
-| | Orion | GCC, Intel | ``/work/noaa/epic/role-epic/spack-stack/orion/`` | EPIC / JCSDA |
+| | Orion | GCC, Intel | ``/apps/contrib/spack-stack/`` | EPIC / JCSDA |
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
| | Discover SCU16 | GCC, Intel | ``/gpfsm/dswdev/jcsda/spack-stack/scu16/`` | JCSDA |
| NASA +-----------------------+--------------------+--------------------------------------------------------+-----------------+
@@ -64,10 +64,12 @@ Pre-configured sites (tier 1)
| | Jet | GCC, Intel | ``/contrib/spack-stack`` | EPIC / NOAA-EMC |
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
| | Narwhal | GCC, Intel, oneAPI | ``/p/app/projects/NEPTUNE/spack-stack/`` | NRL |
-| U.S. Navy (HPCMP) +-----------------------+--------------------+--------------------------------------------------------+-----------------+
-| | Nautilus | Intel | ``/p/app/projects/NEPTUNE/spack-stack/`` | NRL |
+| +-----------------------+--------------------+--------------------------------------------------------+-----------------+
+| U.S. Navy (HPCMP) | Nautilus | GCC, Intel, oneAPI | ``/p/app/projects/NEPTUNE/spack-stack/`` | NRL |
+| +-----------------------+--------------------+--------------------------------------------------------+-----------------+
+| | Blueback (earlyaccess)| GCC, oneAPI | (experimental only, no project directories yet) | NRL |
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
-| Univ. of Wisconsin | S4 | Intel | ``/data/prod/jedi/spack-stack/`` | JCSDA |
+| Univ. of Wisconsin | S4 | Intel | ``/data/prod/jedi/spack-stack/`` | SSEC |
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
| **Cloud platforms** |
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
@@ -77,6 +79,9 @@ Pre-configured sites (tier 1)
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
| NOAA (RDHPCS) | RDHPCS Parallel Works | Intel | ``/contrib/spack-stack-rocky8/`` | EPIC / JCSDA |
+---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
+| U.S. Navy (HPCMP) | HPCMP Parallel Works | GCC | ``/contrib/spack-stack/`` | NRL |
++---------------------+-----------------------+--------------------+--------------------------------------------------------+-----------------+
+
.. _Preconfigured_Sites_Orion:
@@ -88,6 +93,8 @@ The following is required for building new spack environments with any supported
.. code-block:: console
+ # To access the /apps/contrib/spack-stack/ directory, first log in to the orion-devel-1 or orion-devel-2 login node.
+ # Then sudo to the role-epic account.
module purge
@@ -101,8 +108,9 @@ The following is required for building new spack environments with any supported
.. code-block:: console
+ # To access the /apps/contrib/spack-stack/ directory, first log in to the orion-devel-1 or orion-devel-2 login node.
+ # Then sudo to the role-epic account.
module purge
- # No need to load modules, spack-stack-1.8.0 have no dependences
.. _Preconfigured_Sites_Discover_SCU16:
@@ -164,6 +172,30 @@ The following is required for building new spack environments with Intel on this
module unload cray-libsci
module load cray-libsci/23.05.1.4
+The following is required for building new spack environments with Intel oneAPI on this platform. Don't use ``module purge`` on Narwhal!
+
+.. code-block:: console
+
+ umask 0022
+ module unload PrgEnv-cray
+ module load PrgEnv-intel/8.4.0
+ module unload intel
+ module load intel/2024.2
+ module unload cray-mpich
+ module unload craype-network-ofi
+ # Warning. Do not load craype-network-ucx
+ # or cray-mpich-ucx/8.1.21!
+ # There is a bug in the modulefile that prevents
+ # spack from setting the environment for its
+ # build steps when the module is already
+ # loaded. Instead, let spack load it when the
+ # package requires it.
+ #module load craype-network-ucx
+ #module load cray-mpich-ucx/8.1.21
+ module load libfabric/1.12.1.2.2.1
+ module unload cray-libsci
+ module load cray-libsci/23.05.1.4
+
The following is required for building new spack environments with GNU on this platform.. Don't use ``module purge`` on Narwhal!
.. code-block:: console
@@ -203,6 +235,59 @@ The following is required for building new spack environments with any supported
module purge
+.. _Preconfigured_Sites_Blueback:
+
+------------------------------
+NAVY HPCMP Blueback
+------------------------------
+
+The following is required for building new spack environments with Intel oneAPI on this platform. Don't use ``module purge`` on Blueback!
+
+.. code-block:: console
+
+ umask 0022
+ module unload PrgEnv-cray
+ module load PrgEnv-intel/8.4.0
+ module unload intel
+ module load intel/2024.2
+ module unload cray-mpich
+ module unload craype-network-ofi
+ # Warning. Do not load craype-network-ucx
+ # or cray-mpich-ucx/8.1.21!
+ # There is a bug in the modulefile that prevents
+ # spack from setting the environment for its
+ # build steps when the module is already
+ # loaded. Instead, let spack load it when the
+ # package requires it.
+ #module load craype-network-ucx
+ #module load cray-mpich-ucx/8.1.21
+ module load libfabric/1.12.1.2.2.1
+ module unload cray-libsci
+ module load cray-libsci/23.05.1.4
+
+The following is required for building new spack environments with GNU on this platform. Don't use ``module purge`` on Blueback!
+
+ umask 0022
+ module unload PrgEnv-cray
+ module load PrgEnv-gnu/8.4.0
+ module unload gcc
+ module load gcc/12.1.0
+ module unload cray-mpich
+ module unload craype-network-ofi
+ # Warning. Do not load craype-network-ucx
+ # or cray-mpich-ucx/8.1.21!
+ # There is a bug in the modulefile that prevents
+ # spack from setting the environment for its
+ # build steps when the module is already
+ # loaded. Instead, let spack load it when the
+ # package requires it.
+ #module load craype-network-ucx
+ #module load cray-mpich-ucx/8.1.21
+ module load libfabric/1.12.1.2.2.1
+ module unload cray-libsci
+ module load cray-libsci/23.05.1.4
+
+
.. _Preconfigured_Sites_Derecho:
--------------------
@@ -235,6 +320,7 @@ Note that for the installation using Intel 19, the system GCC, 7.5.0, is used on
.. note::
System-wide ``spack`` software installations are maintained by NCO on this platform, which are not associated with spack-stack.
+
.. _Preconfigured_Sites_Parallel_Works:
----------------------------------------
@@ -248,6 +334,21 @@ The following is required for building new spack environments with any supported
module purge
+.. _Preconfigured_Sites_Parallel_Works_Navy:
+
+----------------------------------------
+U.S. Navy Parallel Works (AWS)
+----------------------------------------
+
+The following is required for building new spack environments with GNU on this platform.
+
+.. code-block:: console
+
+ umask 0022
+ module purge
+ scl enable gcc-toolset-13 bash
+
+
.. _Preconfigured_Sites_Gaea_C5:
------------------------------
diff --git a/doc/source/UsingSpackEnvironments.rst b/doc/source/UsingSpackEnvironments.rst
index a22dd9021..21d279c0b 100644
--- a/doc/source/UsingSpackEnvironments.rst
+++ b/doc/source/UsingSpackEnvironments.rst
@@ -5,6 +5,17 @@ Using spack-stack environments
The following tutorial assumes you have a functioning spack-stack environment installed local to your system. This environment is provided on platforms described in :numref:`Section %s `. If you intend to run spack-stack on your developer machine or on a new platform, you can create an environment using the steps described in :numref:`Section %s `.
+There are three steps in setting up a usable development environment.
+The first is to load the spack-stack environment and the second is to create a python virtual environment that is based on the python executable included within the spack-stack installation.
+The reason for the python virtual environment is to ensure that python based applications are utilizing the spack-stack python modules in a consistent manner.
+The third step is to configure your build system to use the python virtual environment created in the second step.
+
+When using a spack-stack environment please utilize the spack-stack installed python modules as much as possible to help maintain the consistency mentioned above.
+Note that after loading the spack-stack environment, all of the spack-stack installed python modules have been added to :code:`PYTHONPATH` so they are immediately accessible in your spack-stack based python virtual environment.
+
+Load the spack-stack environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
Spack environments are used by loading the modulefiles generated at the end of the installation process. These modules control the unix environment and allow CMake, ecbuild, and other build toolchains to resolve the version of software intended for the compilation task. The ``spack`` command itself is not needed in this setup, hence the instructions for creating new environments (``source setup.sh`` etc.) can be ignored. The following is sufficient for loading the modules, allowing them to be used while compiling and running user code.
.. note::
@@ -28,3 +39,78 @@ Now list all available modules via ``module available``. You may be required to
.. note::
When using ``lua`` modules, loading a different module will automatically switch the dependency modules. This is not the case for ``tcl`` modules. For the latter, it is recommended to start over with a clean shell and repeat the above steps.
+
+Create and activate a python virtual environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It is important that the creation of the python virtual environment be based on the python executable from the spack-stack installation.
+This ensures consistency for python applications between the python executable and the spack-stack installed python packages (e.g., numpy).
+Without this consistency, it is easy for the wrong underlying library versions to get dynamically loaded and cause problems with applications crashing.
+
+After the :code:`module load stack-python-name/python-version` command is run, the environment variable :code:`python_ROOT` will be set to the path where the spack-stack installed python version is located.
+The :code:`python_ROOT` variable can be used to ensure that you get the proper virtual environment set as shown here:
+
+.. code-block:: console
+
+ ${python_ROOT}/bin/python3 -m venv <path-to-venv>
+
+Once the virtual environment is set, it must be activated:
+
+.. code-block:: console
+
+ source <path-to-venv>/bin/activate
+
+and after activation the spack-stack python executable will be the first one in your PATH.
+The implication of this is that you should activate the python virtual environment as the last step in setting up your environment to ensure that the path to the virtual environment python remains first in your PATH. Here is an example of the whole process:
+
+.. code-block:: console
+
+ # start from clean slate
+ module purge
+
+ # load the base packages from the spack-stack environment
+ module use $SPACK_STACK_GNU_ENV/install/modulefiles/Core
+ module load stack-gcc/12.2.0
+ module load stack-openmpi/4.1.4
+ module load stack-python/3.11.7
+
+ # load the additional environments required for your
+ # target application
+ module load jedi-fv3-env
+ module load ewok-env
+ module load soca-env
+
+ # Create and activate the spack-stack based python
+ # virtual environment
+ # Note that you only need to create the virtual environment
+ # the first time. Once created you only need to activate
+ # the virtual environment.
+ cd $HOME/projects/jedi
+ ${python_ROOT}/bin/python3 -m venv jedi_py_venv # first time only
+ source jedi_py_venv/bin/activate
+
+Configure build system to utilize the python virtual environment
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Configuring your application build system to use the python virtual environment will continue the goal of consistency mentioned above where all python scripts and packages within the target application are based on the spack-stack built python executable and packages.
+
+There are a variety of build systems in use, and CMake is quite commonly used so CMake will be used as an example for this step.
+The CMake variable :code:`Python3_FIND_STRATEGY` can be used in conjunction with the python virtual environment to direct CMake to find and use the desired python virtual environment.
+By default CMake chooses the latest python installation regardless of which comes first in your PATH.
+By setting :code:`Python3_FIND_STRATEGY=LOCATION`, CMake will instead find and use the first python installation found in your PATH.
+This is the reason for making the spack-stack based python virtual environment first in PATH in the step above.
+
+:code:`Python3_FIND_STRATEGY` can be set in two ways: the first in the project's top-level CMakeLists.txt file and the second on the cmake (or ecbuild) command line.
+Here are examples of both methods:
+
+.. code-block:: console
+
+ # In CMakeLists.txt
+ set( Python3_FIND_STRATEGY LOCATION )
+
+.. code-block:: console
+
+ # On the command line
+ cmake -DPython3_FIND_STRATEGY=LOCATION ...
+
+
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 8f33a6da8..d48e7e8aa 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -51,12 +51,9 @@
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
- 'sphinx.ext.napoleon',
- 'sphinxcontrib.bibtex'
+ 'sphinx.ext.napoleon'
]
-bibtex_bibfiles = []
-
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
@@ -124,37 +121,6 @@
htmlhelp_basename = 'spack-stack'
-# -- Options for LaTeX output ------------------------------------------------
-
-latex_engine = 'pdflatex'
-latex_elements = {
- # The paper size ('letterpaper' or 'a4paper').
- #
- # 'papersize': 'letterpaper',
-
- # The font size ('10pt', '11pt' or '12pt').
- #
- # 'pointsize': '10pt',
-
- # Additional stuff for the LaTeX preamble.
- #
- # 'preamble': '',
-
- # Latex figure (float) alignment
- #
- # 'figure_align': 'htbp',
- 'maketitle': r'\newcommand\sphinxbackoftitlepage{For referencing this document please use: \newline \break Heinzeller, D., A. Richert, C. Book, E. Hartnett, H. Lei, N. Perlin, R. Vasic, S. Herbener, 2024. spack-stack documentation develop. Available at https://spack-stack.readthedocs.io/\textunderscore/downloads/en/latest/pdf/.}\sphinxmaketitle'
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title,
-# author, documentclass [howto, manual, or own class]).
-latex_documents = [
- (master_doc, 'spack-stack.tex', 'spack-stack documentation',
- author,'manual'),
-]
-
-
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
diff --git a/spack b/spack
index 6f9a6ba4e..b274d591f 160000
--- a/spack
+++ b/spack
@@ -1 +1 @@
-Subproject commit 6f9a6ba4e5850494ffb7e7d592fe982036c32fb9
+Subproject commit b274d591fc82d38609124e3fe6f8ef1753357947
diff --git a/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py b/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py
index e2d75b651..a5ce92173 100755
--- a/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py
+++ b/spack-ext/lib/jcsda-emc/spack-stack/stack/meta_modules.py
@@ -361,7 +361,10 @@ def setup_meta_modules():
# Determine the preferred compiler and sort the flattened list of compilers
# such that the preferred compiler comes last. This is so that all other
# compilers populate the MODULEPATHS_SAVE list before the preferred compiler
- # takes it and adds it to the stack-COMPILER metamodule.
+ # takes it and adds it to the stack-COMPILER metamodule. Likewise, we need
+ # to save the list of compiler substitutions from the preferred compiler
+ # so that we have access to it when we build the MPI meta module. Note that
+ # by definition, only the preferred compiler can be used for MPI dependencies.
try:
preferred_compilers = spack.config.get("packages")["all"]["prefer"]
except:
@@ -386,6 +389,8 @@ def custom_sort_key(entry):
compiler_config = spack.config.get("compilers")
# Collect and save modulepaths for the preferred compiler
MODULEPATHS_SAVE = []
+ # Initialize saved substitutes to None (populate for preferred compiler later)
+ SUBSTITUTES_SAVE = None
for compiler_identifier in sorted_flattened_compiler_list:
(compiler_name, compiler_version) = compiler_identifier.replace("@=","@").split("@")
# Loop through all configured compilers and find the correct match
@@ -523,6 +528,10 @@ def custom_sort_key(entry):
with open(compiler_module_file, "w") as f:
f.write(module_content)
logging.info(" ... writing {}".format(compiler_module_file))
+ # If this is the last compiler in the list (i.e. the preferred compiler),
+ # then save the substitutes for later use for building the MPI meta module.
+ if compiler_identifier == sorted_flattened_compiler_list[-1]:
+ SUBSTITUTES_SAVE = substitutes
del MODULEPATHS_SAVE
# Create mpi modules
@@ -685,7 +694,18 @@ def custom_sort_key(entry):
)
# Compiler wrapper environment variables
- if "intel" in mpi_name:
+ if "intel" in mpi_name and compiler_name == "oneapi":
+ substitutes["MPICC"] = os.path.join("mpiicx")
+ substitutes["MPICXX"] = os.path.join("mpiicpx")
+ if "ifx" in SUBSTITUTES_SAVE["FC"] and not "ifort" in SUBSTITUTES_SAVE["FC"]:
+ substitutes["MPIF77"] = os.path.join("mpiifx")
+ substitutes["MPIF90"] = os.path.join("mpiifx")
+ elif not "ifx" in SUBSTITUTES_SAVE["FC"] and "ifort" in SUBSTITUTES_SAVE["FC"]:
+ substitutes["MPIF77"] = os.path.join("mpiifort")
+ substitutes["MPIF90"] = os.path.join("mpiifort")
+ else:
+ raise Exception(f"For {mpi_name}, cannot determine MPI wrapper from FC={SUBSTITUTES_SAVE['FC']}")
+ elif "intel" in mpi_name and compiler_name == "intel":
substitutes["MPICC"] = os.path.join("mpiicc")
substitutes["MPICXX"] = os.path.join("mpiicpc")
substitutes["MPIF77"] = os.path.join("mpiifort")
@@ -772,6 +792,7 @@ def custom_sort_key(entry):
with open(mpi_module_file, "w") as f:
f.write(module_content)
logging.info(" ... writing {}".format(mpi_module_file))
+ del SUBSTITUTES_SAVE
# Create python modules. Need to accommodate both external
# Python distributions and spack-built Python distributions.
diff --git a/spack-ext/repos/spack-stack/packages/neptune-env/package.py b/spack-ext/repos/spack-stack/packages/neptune-env/package.py
index f79968eac..ff7aef9d4 100644
--- a/spack-ext/repos/spack-stack/packages/neptune-env/package.py
+++ b/spack-ext/repos/spack-stack/packages/neptune-env/package.py
@@ -1,4 +1,4 @@
-# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -9,7 +9,7 @@
class NeptuneEnv(BundlePackage):
- """Development environment for neptune standalone"""
+ """Development environment for NEPTUNE standalone"""
# Fake URL
homepage = "https://github.com/notavalidaccount/neptune"
@@ -17,11 +17,9 @@ class NeptuneEnv(BundlePackage):
maintainers("climbfuji", "areinecke")
- version("1.4.0")
+ version("1.5.0")
- variant("python", default=True, description="Build Python dependencies")
- variant("espc", default=True, description="Build ESPC dependencies")
- variant("xnrl", default=True, description="Build XNRL and its extra Python dependencies")
+ variant("espc", default=False, description="Build ESPC dependencies")
depends_on("base-env", type="run")
@@ -33,7 +31,6 @@ class NeptuneEnv(BundlePackage):
depends_on("libyaml", type="run")
depends_on("p4est", type="run")
depends_on("w3emc", type="run")
- depends_on("w3nco", type="run")
depends_on("sp", type="run", when="%aocc")
depends_on("ip@5:", type="run", when="%apple-clang")
depends_on("ip@5:", type="run", when="%gcc")
@@ -43,28 +40,13 @@ class NeptuneEnv(BundlePackage):
depends_on("nco", type="run")
depends_on("mct", type="run")
- conflicts("+xnrl", when="~python", msg="Variant xnrl requires variant python")
-
with when("+espc"):
depends_on("fftw", type="build")
depends_on("netlib-lapack", type="build")
- with when("+python"):
- depends_on("py-f90nml", type="run")
- depends_on("py-h5py", type="run")
- depends_on("py-netcdf4", type="run")
- depends_on("py-pandas", type="run")
- depends_on("py-pycodestyle", type="run")
- depends_on("py-pybind11", type="run")
- depends_on("py-pyhdf", type="run")
- depends_on("py-python-dateutil", type="run")
- depends_on("py-pyyaml", type="run")
- depends_on("py-scipy", type="run")
- depends_on("py-xarray", type="run")
- depends_on("py-pytest", type="run")
- depends_on("py-fortranformat", type="run")
-
- with when("+xnrl"):
- depends_on("py-xnrl", type="run")
+ # Basic Python dependencies that are always needed
+ depends_on("py-f90nml", type="run")
+ depends_on("py-python-dateutil", type="run")
+ depends_on("py-pyyaml", type="run")
# There is no need for install() since there is no code.
diff --git a/spack-ext/repos/spack-stack/packages/neptune-python-env/package.py b/spack-ext/repos/spack-stack/packages/neptune-python-env/package.py
new file mode 100644
index 000000000..2e20105b8
--- /dev/null
+++ b/spack-ext/repos/spack-stack/packages/neptune-python-env/package.py
@@ -0,0 +1,43 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import sys
+
+from spack.package import *
+
+
+class NeptunePythonEnv(BundlePackage):
+ """Development environment for NEPTUNE standalone with all Python dependencies"""
+
+ # Fake URL
+ homepage = "https://github.com/notavalidaccount/neptune"
+ git = "https://github.com/notavalidaccount/neptune.git"
+
+ maintainers("climbfuji", "areinecke")
+
+ version("1.5.0")
+
+ variant("xnrl", default=False, description="Build non-public XNRL")
+
+ depends_on("neptune-env", type="run")
+ # Enable the Python variant for ESMF
+ depends_on("esmf +python", type="run")
+
+ depends_on("py-h5py", type="run")
+ depends_on("py-netcdf4", type="run")
+ depends_on("py-pandas", type="run")
+ depends_on("py-pycodestyle", type="run")
+ depends_on("py-pybind11", type="run")
+ depends_on("py-pyhdf", type="run")
+ depends_on("py-pyyaml", type="run")
+ depends_on("py-scipy", type="run")
+ depends_on("py-xarray", type="run")
+ depends_on("py-pytest", type="run")
+ depends_on("py-fortranformat", type="run")
+
+ with when("+xnrl"):
+ depends_on("py-xnrl", type="run")
+
+ # There is no need for install() since there is no code.