Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updated testing and 1.14 requirements #67

Merged
merged 8 commits into from
Jun 12, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 6 additions & 8 deletions .github/workflows/alone.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ on:

env:
MPICH_VERSION: 4.1.2
HDF5_VERSION: 1.14.2
ARGOBOTS_VERSION: 1.1
ASYNC_VOL_VERSION: 1.8.1

Expand All @@ -31,7 +30,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4.1.1
- name: Set up dependencies
run: |
sudo apt-get update
Expand Down Expand Up @@ -67,10 +66,9 @@ jobs:
rm -rf ${GITHUB_WORKSPACE}/HDF5
mkdir ${GITHUB_WORKSPACE}/HDF5
cd ${GITHUB_WORKSPACE}/HDF5
VER_MAJOR=${HDF5_VERSION%.*}
wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
tar -zxf hdf5-${HDF5_VERSION}.tar.gz
cd hdf5-${HDF5_VERSION}
wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
tar -zxf hdf5.tar.gz
cd hdf5-*
./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
--silent \
--enable-parallel \
Expand Down Expand Up @@ -115,7 +113,7 @@ jobs:
rm -rf ${ASYNC_DIR}
mkdir ${ASYNC_DIR}
cd ${ASYNC_DIR}
wget -qc https://github.com/hpc-io/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
wget -qc https://github.com/HDFGroup/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
tar -xf v${ASYNC_VOL_VERSION}.tar.gz
cd vol-async-${ASYNC_VOL_VERSION}
mkdir build
Expand All @@ -136,7 +134,7 @@ jobs:
rm -rf ${CAHCE_DIR}
mkdir ${CAHCE_DIR}
cd ${CAHCE_DIR}
git clone https://github.com/hpc-io/vol-cache.git
git clone https://github.com/HDFGroup/vol-cache.git
cd vol-cache
mkdir build
cd build
Expand Down
10 changes: 4 additions & 6 deletions .github/workflows/mac_mpich.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,26 +22,24 @@ on:

env:
MPICH_VERSION: 4.1.2
HDF5_VERSION: 1.14.2
NETCDF_VERSION: 4.9.2

jobs:
build:
runs-on: macos-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4.1.1
- name: Set up dependencies
run: |
brew install automake autoconf libtool m4 open-mpi zlib
- name: Install HDF5
run: |
cd ${GITHUB_WORKSPACE}
rm -rf HDF5 ; mkdir HDF5 ; cd HDF5
VER_MAJOR=${HDF5_VERSION%.*}
wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
tar -zxf hdf5-${HDF5_VERSION}.tar.gz
cd hdf5-${HDF5_VERSION}
wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
tar -zxf hdf5.tar.gz
cd hdf5-*
./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
--silent \
--enable-parallel \
Expand Down
10 changes: 4 additions & 6 deletions .github/workflows/ubuntu_mpich.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,15 +22,14 @@ on:

env:
MPICH_VERSION: 4.1.2
HDF5_VERSION: 1.14.2
NETCDF_VERSION: 4.9.2

jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4.1.1
- name: Set up dependencies
run: |
sudo apt-get update
Expand Down Expand Up @@ -60,10 +59,9 @@ jobs:
run: |
cd ${GITHUB_WORKSPACE}
rm -rf HDF5 ; mkdir HDF5 ; cd HDF5
VER_MAJOR=${HDF5_VERSION%.*}
wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
tar -zxf hdf5-${HDF5_VERSION}.tar.gz
cd hdf5-${HDF5_VERSION}
wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
tar -zxf hdf5.tar.gz
cd hdf5-*
./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
--silent \
--enable-parallel \
Expand Down
10 changes: 4 additions & 6 deletions .github/workflows/ubuntu_openmpi.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,14 @@ on:
- 'case_studies/*'

env:
HDF5_VERSION: 1.14.2
NETCDF_VERSION: 4.9.2

jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4.1.1
- name: Set up dependencies
run: |
sudo apt-get update
Expand All @@ -45,10 +44,9 @@ jobs:
cd ${GITHUB_WORKSPACE}
echo "Install HDF5 on ${GITHUB_WORKSPACE}/HDF5"
rm -rf HDF5 ; mkdir HDF5 ; cd HDF5
VER_MAJOR=${HDF5_VERSION%.*}
wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
tar -zxf hdf5-${HDF5_VERSION}.tar.gz
cd hdf5-${HDF5_VERSION}
wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
tar -zxf hdf5.tar.gz
cd hdf5-*
./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
--silent \
--enable-parallel \
Expand Down
14 changes: 6 additions & 8 deletions .github/workflows/ubuntu_stack_vols.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ on:

env:
MPICH_VERSION: 4.1.2
HDF5_VERSION: 1.14.2
ARGOBOTS_VERSION: 1.1
ASYNC_VOL_VERSION: 1.8.1

Expand All @@ -31,7 +30,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v4.1.1
- name: Set up dependencies
run: |
sudo apt-get update
Expand Down Expand Up @@ -67,10 +66,9 @@ jobs:
rm -rf ${GITHUB_WORKSPACE}/HDF5
mkdir ${GITHUB_WORKSPACE}/HDF5
cd ${GITHUB_WORKSPACE}/HDF5
VER_MAJOR=${HDF5_VERSION%.*}
wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-${VER_MAJOR}/hdf5-${HDF5_VERSION}/src/hdf5-${HDF5_VERSION}.tar.gz
tar -zxf hdf5-${HDF5_VERSION}.tar.gz
cd hdf5-${HDF5_VERSION}
wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
tar -zxf hdf5.tar.gz
cd hdf5-*
./configure --prefix=${GITHUB_WORKSPACE}/HDF5 \
--silent \
--enable-parallel \
Expand Down Expand Up @@ -115,7 +113,7 @@ jobs:
rm -rf ${ASYNC_DIR}
mkdir ${ASYNC_DIR}
cd ${ASYNC_DIR}
wget -qc https://github.com/hpc-io/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
wget -qc https://github.com/HDFGroup/vol-async/archive/refs/tags/v${ASYNC_VOL_VERSION}.tar.gz
tar -xf v${ASYNC_VOL_VERSION}.tar.gz
cd vol-async-${ASYNC_VOL_VERSION}
mkdir build
Expand All @@ -136,7 +134,7 @@ jobs:
rm -rf ${CAHCE_DIR}
mkdir ${CAHCE_DIR}
cd ${CAHCE_DIR}
git clone https://github.com/hpc-io/vol-cache.git
git clone https://github.com/HDFGroup/vol-cache.git
cd vol-cache
mkdir build
cd build
Expand Down
25 changes: 7 additions & 18 deletions configure.ac
Original file line number Diff line number Diff line change
Expand Up @@ -336,31 +336,20 @@ if test "x$have_hdf5" = xno ; then
to specify the location of HDF5 installation. Abort.
-----------------------------------------------------------------------])
fi
AC_MSG_CHECKING([whether HDF5 version is 1.13.0 (develop branch) or later])
AC_MSG_CHECKING([whether the HDF5 version is equal to or greater than 1.14.0])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
#include <hdf5.h>
#if (H5_VERS_MAJOR*1000000 + H5_VERS_MINOR*1000 + H5_VERS_RELEASE < 1013000)
#error HDF5 version is older than 1.13.0
#if (H5_VERS_MAJOR*1000000 + H5_VERS_MINOR*1000 + H5_VERS_RELEASE < 1014000)
#error HDF5 version is older than 1.14.0
#endif
]])], [hdf5_ge_1_13_0=yes], [hdf5_ge_1_13_0=no])
AC_MSG_RESULT([$hdf5_ge_1_13_0])
if test x$hdf5_ge_1_13_0 = xno; then
]])], [hdf5_ge_1_14_0=yes], [hdf5_ge_1_14_0=no])
AC_MSG_RESULT([$hdf5_ge_1_14_0])
if test x$hdf5_ge_1_14_0 = xno; then
AC_MSG_ERROR([
-----------------------------------------------------------------------
H5VL_log requires HDF5 1.13.0 and later. Abort.
H5VL_log requires HDF5 1.14.0 or later. Abort.
-----------------------------------------------------------------------])
fi
AC_MSG_CHECKING([whether HDF5 version is 1.13.3 or later])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
#include <hdf5.h>
#if (H5_VERS_MAJOR*1000000 + H5_VERS_MINOR*1000 + H5_VERS_RELEASE < 1013003)
#error HDF5 version is older than 1.13.3
#endif
]])], [hdf5_ge_1_13_3=yes], [hdf5_ge_1_13_3=no])
AC_MSG_RESULT([$hdf5_ge_1_13_3])
if test x$hdf5_ge_1_13_3 = xyes; then
AC_DEFINE(HDF5_GE_1133, 1, ["HDF5 version greater than 1.13.3"])
fi
AC_MSG_CHECKING([whether HDF5 parallel I/O is enabled])
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[
#include <hdf5.h>
Expand Down
18 changes: 9 additions & 9 deletions doc/INSTALL.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
## The Log VOL connector - Build Instructions

### Software Requirements
* [HDF5 1.13.0](https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.13/hdf5-1.13.2/src/hdf5-1.13.2.tar.gz)
* [Minimum HDF5 1.14](https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz)
+ Configured with parallel I/O support (--enable-parallel)
* MPI C and C++ compilers
+ The plugin uses the constant initializer; a C++ compiler supporting the C++17 standard is required
Expand All @@ -12,20 +12,20 @@
+ [m4](https://www.gnu.org/software/m4/) 1.4.18

### Building HDF5 libraries
* HDF5 1.13.0 and later (**required**)
+ Download HDF5 official release version 1.13.0.
* HDF5 1.14.0 and later (**required**)
+ Download the latest official HDF5 release.
```
% wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.13/hdf5-1.13.2/src/hdf5-1.13.2.tar.gz
% wget https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
```
+ Configure HDF5 with parallel I/O enabled.
```
% tar -zxf hdf5-1_13_0.tar.gz
% cd hdf5-1_13_0.tar.gz
% ./configure --prefix=${HOME}/HDF5/1.13.0 --enable-parallel CC=mpicc
% tar -zxf hdf5.tar.gz
% cd hdf5-*
% ./configure --prefix=${HOME}/HDF5/latest --enable-parallel CC=mpicc
% make -j4 install
```
+ The above example commands will install the HDF5 library under the folder
`${HOME}/HDF5/1.13.0`.
`${HOME}/HDF5/latest`.

### Building the Log VOL connector
* Obtain the source code package by either downloading the official release or
Expand All @@ -44,7 +44,7 @@
```
* Example configure and make commands are given below.
```
% ./configure --prefix=${HOME}/Log_IO_VOL --with-hdf5=${HOME}/HDF5/1.13.0
% ./configure --prefix=${HOME}/Log_IO_VOL --with-hdf5=${HOME}/HDF5/latest
% make -j 4 install
```
+ The above commands will install the log-vol library under the folder `${HOME}/Log_IO_VOL`.
Expand Down
12 changes: 6 additions & 6 deletions doc/log_cache_async_vol.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
* [Build Instructions](#build-instructions)
* [Run Instructions](#run-e3sm-io)

This demo uses [E3SM-IO](https://github.com/Parallel-NetCDF/E3SM-IO) to show how to run the Log VOL connector on top of the [Cache VOL connector](https://github.com/hpc-io/vol-cache) and the [Async VOL connector](https://github.com/hpc-io/vol-async).
This demo uses [E3SM-IO](https://github.com/Parallel-NetCDF/E3SM-IO) to show how to run the Log VOL connector on top of the [Cache VOL connector](https://github.com/HDFGroup/vol-cache) and the [Async VOL connector](https://github.com/HDFGroup/vol-async).
brtnfld marked this conversation as resolved.
Show resolved Hide resolved

E3SM-IO is an I/O benchmark suite that measures the performance I/O kernel of
[E3SM](https://github.com/E3SM-Project/E3SM), a state-of-the-art Earth system modeling,
Expand Down Expand Up @@ -30,17 +30,17 @@ The Log, Cache, and Async VOL connectors can be enabled by directly setting the
% export HDF5_ROOT=${HDF5_DIR}
```

+ HDF5 1.13.3: `--enable-parallel`, `--enable-threadsafe`, and `--enable-unsupported` are [required by Async VOL](https://hdf5-vol-async.readthedocs.io/en/latest/gettingstarted.html#build-async-i-o-vol) at configure time.
+ Minimum HDF5 1.14: `--enable-parallel`, `--enable-threadsafe`, and `--enable-unsupported` are [required by Async VOL](https://hdf5-vol-async.readthedocs.io/en/latest/gettingstarted.html#build-async-i-o-vol) at configure time.

```shell
# create a new folder "HDF5" under $WORKSPACE
% mkdir ${HDF5_DIR}
% cd ${HDF5_DIR}

# download HDF5 source codes
% wget -cq https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.13/hdf5-1.13.3/src/hdf5-1.13.3.tar.gz
% tar -zxf hdf5-1.13.3.tar.gz
% cd hdf5-1.13.3
% wget -cq https://github.com/HDFGroup/hdf5/releases/latest/download/hdf5.tar.gz
% tar -zxf hdf5.tar.gz
% cd hdf5-*

# configure, output saved to log.config
% ./configure --prefix=${HDF5_DIR} \
Expand Down Expand Up @@ -98,7 +98,7 @@ The Log, Cache, and Async VOL connectors can be enabled by directly setting the
% cd ${ASYNC_DIR}

# download Async VOL source codes and create a build folder
% wget -qc https://github.com/hpc-io/vol-async/archive/refs/tags/v1.4.tar.gz
% wget -qc https://github.com/HDFGroup/vol-async/archive/refs/tags/v1.4.tar.gz
% tar -xf v1.4.tar.gz
% cd vol-async-1.4
% mkdir build
Expand Down
12 changes: 8 additions & 4 deletions src/H5VL_log_dataset.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ err_out:;
*
*-------------------------------------------------------------------------
*/
herr_t H5VL_log_dataset_read_1 (void *dset,
static herr_t H5VL_log_dataset_read_1 (void *dset,
hid_t mem_type_id,
hid_t mem_space_id,
hid_t file_space_id,
Expand Down Expand Up @@ -334,7 +334,7 @@ err_out:;
*
*-------------------------------------------------------------------------
*/
herr_t H5VL_log_dataset_write_1 (void *dset,
static herr_t H5VL_log_dataset_write_1 (void *dset,
hid_t mem_type_id,
hid_t mem_space_id,
hid_t file_space_id,
Expand Down Expand Up @@ -378,7 +378,7 @@ err_out:;
return err;
} /* end H5VL_log_dataset_write() */

herr_t H5VL_log_dataset_read_2 (size_t count,
herr_t H5VL_log_dataset_read (size_t count,
void *dset[],
hid_t mem_type_id[],
hid_t mem_space_id[],
Expand All @@ -387,6 +387,10 @@ herr_t H5VL_log_dataset_read_2 (size_t count,
void *buf[],
void **req) {
herr_t err = 0;
H5VL_log_dset_t *dp = (H5VL_log_dset_t *)dset;
H5VL_log_dset_info_t *dip = NULL; // Dataset info
hid_t dsid; // Dataset space id
H5VL_log_selections *dsel = NULL; // Selection blocks
brtnfld marked this conversation as resolved.
Show resolved Hide resolved
size_t i;

for (i = 0; i < count; i++) {
Expand All @@ -398,7 +402,7 @@ herr_t H5VL_log_dataset_read_2 (size_t count,
return err;
}

herr_t H5VL_log_dataset_write_2 (size_t count,
herr_t H5VL_log_dataset_write (size_t count,
void *dset[],
hid_t mem_type_id[],
hid_t mem_space_id[],
Expand Down
Loading