Merge remote-tracking branch 'origin/develop' into feature_2966_local… #4518
Workflow file for this run

name: Testing

# Compile MET for feature branches
# Run unit tests for pull requests

on:
  push:
    branches:
      - develop
      - develop-ref
      - 'feature_**'
      - 'main_v**'
      - 'bugfix_**'
    paths-ignore:
      - 'docs/**'
      - '.github/pull_request_template.md'
      - '.github/ISSUE_TEMPLATE/**'
      - '.github/labels/**'
      - '**/README.md'
      - '**/LICENSE.md'
  pull_request:
    types: [opened, reopened, synchronize]
    branches:
      - develop
      - 'main_v**'
    paths-ignore:
      - 'docs/**'
      - '.github/pull_request_template.md'
      - '.github/ISSUE_TEMPLATE/**'
      - '.github/labels/**'
      - '**/README.md'
      - '**/LICENSE.md'
  workflow_dispatch:
    inputs:
      force_tests:
        description: 'Run the unit tests'
        default: true
        type: boolean

env:
  DOCKERHUB_REPO: dtcenter/met-dev
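
# The job_control job computes run_* flags (via .github/jobs/set_job_controls.sh)
# that gate the compile, unit test, diff, and truth update jobs below.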
jobs:
  job_control:
    name: Determine which jobs to run
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set job controls
        id: job_status
        run: .github/jobs/set_job_controls.sh
        env:
          commit_msg: ${{ github.event.head_commit.message }}
          force_tests: ${{ github.event.inputs.force_tests }}
    outputs:
      run_compile: ${{ steps.job_status.outputs.run_compile }}
      run_push: ${{ steps.job_status.outputs.run_push }}
      run_unit_tests: ${{ steps.job_status.outputs.run_unit_tests }}
      run_diff: ${{ steps.job_status.outputs.run_diff }}
      run_update_truth: ${{ steps.job_status.outputs.run_update_truth }}
      met_base_repo: ${{ steps.job_status.outputs.met_base_repo }}
      met_base_tag: ${{ steps.job_status.outputs.met_base_tag }}
      branch_name: ${{ steps.job_status.outputs.branch_name }}
      truth_data_version: ${{ steps.job_status.outputs.truth_data_version }}
      input_data_version: ${{ steps.job_status.outputs.input_data_version }}
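
  # Compile MET inside Docker; push the image to DockerHub only when run_push is true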
  compile:
    name: Compile MET
    runs-on: ubuntu-latest
    needs: job_control
    if: ${{ needs.job_control.outputs.run_compile == 'true' }}
    steps:
      - uses: actions/checkout@v4
      - name: Create directories to store output
        run: mkdir -p ${RUNNER_WORKSPACE}/logs
      - name: Compile MET in Docker
        run: .github/jobs/build_docker_image.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          MET_BASE_REPO: ${{ needs.job_control.outputs.met_base_repo }}
          MET_BASE_TAG: ${{ needs.job_control.outputs.met_base_tag }}
          MET_CONFIG_OPTS: '--enable-all'
      - name: Copy all build log files into logs directory
        if: always()
        run: cp ${GITHUB_WORKSPACE}/*.log ${RUNNER_WORKSPACE}/logs/
      - name: Push Docker Image
        run: .github/jobs/push_docker_image.sh
        if: ${{ needs.job_control.outputs.run_push == 'true' }}
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_compile
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore
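
  # Refresh the Docker data volumes that provide input data for the unit tests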
  update_input_data:
    name: Update input data volumes
    runs-on: ubuntu-latest
    needs: [job_control]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    steps:
      - uses: dtcenter/metplus-action-data-update@v3
        with:
          docker_name: ${{ secrets.DOCKER_USERNAME }}
          docker_pass: ${{ secrets.DOCKER_PASSWORD }}
          repo_name: ${{ github.repository }}
          data_prefix: unit_test
          branch_name: ${{ needs.job_control.outputs.truth_data_version }}
          docker_data_dir: /data/input/MET_test_data
          data_repo_dev: met-data-dev
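
  # Unit tests run in two rounds: the Unit 1* jobs run independent groups of tests in
  # parallel matrix jobs, and the Unit 2* jobs download their output artifacts and run
  # the tests that depend on it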
  unit_1a:
    name: Unit 1a
    runs-on: ubuntu-latest
    needs: [job_control, update_input_data, compile]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    strategy:
      matrix:
        include:
          - jobid: 'job1'
            tests: 'ascii2nc'
          - jobid: 'job2'
            tests: 'pb2nc madis2nc pcp_combine gen_ens_prod'
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Run Unit Tests in Docker
        run: .github/jobs/run_unit_docker.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          TESTS: ${{ matrix.tests }}
          INPUT_DATA_VERSION: ${{ needs.job_control.outputs.input_data_version }}-all
      - name: Upload output as artifact
        uses: actions/upload-artifact@v4
        with:
          name: unit_1a_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/output
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_unit_1a_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore

  unit_1b:
    name: Unit 1b
    runs-on: ubuntu-latest
    needs: [job_control, update_input_data, compile]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    strategy:
      matrix:
        include:
          - jobid: 'job1'
            tests: 'ascii2nc_indy pb2nc_indy tc_dland tc_pairs tc_stat plot_tc tc_rmw rmw_analysis tc_diag tc_gen'
          - jobid: 'job2'
            tests: 'met_test_scripts mode_multivar mode_graphics mtd regrid airnow gsi_tools netcdf modis series_analysis wwmca_regrid gen_vx_mask interp_shape grid_diag grib_tables lidar2nc shift_data_plane trmm2nc aeronet wwmca_plot ioda2nc gaussian'
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Run Unit Tests in Docker
        run: .github/jobs/run_unit_docker.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          TESTS: ${{ matrix.tests }}
          INPUT_DATA_VERSION: ${{ needs.job_control.outputs.input_data_version }}-all
      - name: Upload output as artifact
        uses: actions/upload-artifact@v4
        with:
          name: unit_1b_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/output
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_unit_1b_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore

  unit_1c:
    name: Unit 1c
    runs-on: ubuntu-latest
    needs: [job_control, update_input_data, compile]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    strategy:
      matrix:
        include:
          - jobid: 'job1'
            tests: 'ref_config_lead_00 ref_config_lead_12'
          - jobid: 'job2'
            tests: 'ref_config_lead_24 ref_config_lead_48'
          - jobid: 'job3'
            tests: 'ref_config_lead_36'
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Run Unit Tests in Docker
        run: .github/jobs/run_unit_docker.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          TESTS: ${{ matrix.tests }}
          INPUT_DATA_VERSION: ${{ needs.job_control.outputs.input_data_version }}-all
      - name: Upload output as artifact
        uses: actions/upload-artifact@v4
        with:
          name: unit_1c_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/output
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_rc_leads_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore
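
  # Run the ref_config tests using the ref_config_lead_* output downloaded from unit_1c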
  unit_2c:
    name: Unit 2c
    runs-on: ubuntu-latest
    needs: [job_control, unit_1c]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    strategy:
      matrix:
        include:
          - jobid: 'job1'
            tests: 'ref_config'
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Download ref_config_leads output from artifact
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.workspace }}/output
          pattern: unit_1c_job*
          merge-multiple: true
      - name: Run Unit Tests in Docker
        run: .github/jobs/run_unit_docker.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          TESTS: ${{ matrix.tests }}
          INPUT_DATA_VERSION: 'none'
      - name: Upload output as artifact
        uses: actions/upload-artifact@v4
        with:
          name: unit_2c_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/output
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_unit_2c_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore

  unit_2a:
    name: Unit 2a
    runs-on: ubuntu-latest
    needs: [job_control, unit_1a]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    strategy:
      matrix:
        include:
          - jobid: 'job1'
            tests: 'point_stat stat_analysis_ps'
          - jobid: 'job2'
            tests: 'grid_stat stat_analysis_gs'
          - jobid: 'job3'
            tests: 'wavelet_stat stat_analysis_ws'
          - jobid: 'job4'
            tests: 'ensemble_stat stat_analysis_es'
          - jobid: 'job5'
            tests: 'ugrid'
          - jobid: 'job6'
            tests: 'grid_weight point_weight'
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Download 1a output from artifact
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.workspace }}/output
          pattern: unit_1a_job*
          merge-multiple: true
      - name: Run Unit Tests in Docker
        run: .github/jobs/run_unit_docker.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          TESTS: ${{ matrix.tests }}
          INPUT_DATA_VERSION: ${{ needs.job_control.outputs.input_data_version }}-all
      - name: Upload output as artifact
        uses: actions/upload-artifact@v4
        with:
          name: unit_2a_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/output
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_unit_2a_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore

  unit_2b:
    name: Unit 2b
    runs-on: ubuntu-latest
    needs: [job_control, unit_1a]
    if: ${{ needs.job_control.outputs.run_unit_tests == 'true' }}
    strategy:
      matrix:
        include:
          - jobid: 'job1'
            tests: 'climatology_1.0deg'
          - jobid: 'job2'
            tests: 'climatology_1.5deg'
          - jobid: 'job3'
            tests: 'climatology_2.5deg'
          - jobid: 'job4'
            tests: 'climatology_mixed'
          - jobid: 'job5'
            tests: 'python point2grid plot_data_plane mode mode_analysis perc_thresh hira plot_point_obs quality_filter obs_summary duplicate_flag'
      fail-fast: false
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Download 1a output from artifact
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.workspace }}/output
          pattern: unit_1a_job*
          merge-multiple: true
      - name: Run Unit Tests in Docker
        run: .github/jobs/run_unit_docker.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          TESTS: ${{ matrix.tests }}
          INPUT_DATA_VERSION: ${{ needs.job_control.outputs.input_data_version }}-all
      - name: Upload output as artifact
        uses: actions/upload-artifact@v4
        with:
          name: unit_2b_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/output
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_unit_2b_${{ matrix.jobid }}
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore
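
  # Collect the unit test output from the previous jobs, compare it against the truth
  # data, and upload any differences as an artifact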
  run_diffs:
    name: Check for Differences
    runs-on: ubuntu-latest
    needs: [job_control, unit_1b, unit_2a, unit_2b, unit_2c]
    if: ${{ needs.job_control.outputs.run_diff == 'true' }}
    steps:
      - name: Download data from previous jobs
        uses: actions/download-artifact@v4
      - name: Copy test output into single directory
        run: |
          mkdir ${RUNNER_WORKSPACE}/output
          cp -r unit_*/* ${RUNNER_WORKSPACE}/output/
      - uses: actions/checkout@v4
      - name: Run Diff Tests in Docker
        run: .github/jobs/run_diff_docker.sh
        env:
          SOURCE_BRANCH: ${{ needs.job_control.outputs.branch_name }}
          RUN_UPDATE_TRUTH: ${{ needs.job_control.outputs.run_update_truth }}
          TRUTH_DATA_VERSION: ${{ needs.job_control.outputs.truth_data_version }}
      - name: Upload diff files as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: diff
          path: ${{ runner.workspace }}/diff
          if-no-files-found: ignore
      - name: Upload logs as artifact
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: logs_diff
          path: ${{ runner.workspace }}/logs
          if-no-files-found: ignore
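
  # Combine the per-job logs_* artifacts into a single "logs" artifact and delete the originals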
  merge_logs:
    name: Merge Log Artifacts
    runs-on: ubuntu-latest
    needs: [run_diffs]
    if: ${{ always() }}
    steps:
      - name: Upload merged logs as artifact
        uses: actions/upload-artifact/merge@v4
        with:
          name: logs
          pattern: logs_*
          delete-merged: true
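
  # Package the unit test output as a new truth data volume when run_update_truth is true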
  update_truth:
    name: Update Truth Data
    runs-on: ubuntu-latest
    needs: [job_control, unit_1b, unit_2a, unit_2b, unit_2c]
    if: ${{ needs.job_control.outputs.run_update_truth == 'true' }}
    steps:
      - uses: actions/checkout@v4
      - name: Free disk space
        run: .github/jobs/free_disk_space.sh
      - name: Download data from previous jobs
        uses: actions/download-artifact@v4
      - name: Copy test output into single directory
        run: |
          mkdir ${RUNNER_WORKSPACE}/met_test_truth
          cp -r unit_*/* ${RUNNER_WORKSPACE}/met_test_truth/
      - name: Create Docker Data Volume
        run: .github/jobs/create_docker_truth.sh
        env:
          TRUTH_DATA_VERSION: ${{ needs.job_control.outputs.truth_data_version }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}