diff --git a/jobs/JREGIONAL_GET_OBS_CCPA b/jobs/JREGIONAL_GET_OBS_CCPA
new file mode 100755
index 000000000..6f541ccd2
--- /dev/null
+++ b/jobs/JREGIONAL_GET_OBS_CCPA
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script pulls CCPA observation data for comparison to the model for
+# the requested accumulations. Supported accumulations: 01h, 03h, and 06h.
+# NOTE: Accumulation is currently hardcoded to 01h.
+# The verification uses MET/pcp-combine to sum 01h files into
+# desired accumulations.
+#
+#-----------------------------------------------------------------------
+#
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${GLOBAL_VAR_DEFNS_FP}
+. $USHDIR/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -u -x; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that pulls CCPA observation data
+for verification purposes.
+========================================================================"
+
+#
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job and pass to it the necessary varia-
+# bles.
+#
+#-----------------------------------------------------------------------
+#
+$SCRIPTSDIR/exregional_get_ccpa_files.sh || \
+print_err_msg_exit "\
+Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+#
+#-----------------------------------------------------------------------
+#
+# Print exit message.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
diff --git a/jobs/JREGIONAL_GET_OBS_MRMS b/jobs/JREGIONAL_GET_OBS_MRMS
new file mode 100755
index 000000000..3524532c6
--- /dev/null
+++ b/jobs/JREGIONAL_GET_OBS_MRMS
@@ -0,0 +1,90 @@
+#!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script pulls MRMS observation data for comparison to the model.
+#
+#-----------------------------------------------------------------------
+#
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${GLOBAL_VAR_DEFNS_FP}
+. $USHDIR/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -u -x; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that pulls MRMS observation data
+for verification purposes.
+========================================================================"
+
+#
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job and pass to it the necessary varia-
+# bles.
+#
+#-----------------------------------------------------------------------
+#
+$SCRIPTSDIR/exregional_get_mrms_files.sh || \
+print_err_msg_exit "\
+Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+#
+#-----------------------------------------------------------------------
+#
+# Print exit message.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
diff --git a/jobs/JREGIONAL_GET_OBS_NDAS b/jobs/JREGIONAL_GET_OBS_NDAS
new file mode 100755
index 000000000..32c1d819e
--- /dev/null
+++ b/jobs/JREGIONAL_GET_OBS_NDAS
@@ -0,0 +1,90 @@
+#!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script pulls NDAS observation data for comparison to the model.
+#
+#-----------------------------------------------------------------------
+#
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${GLOBAL_VAR_DEFNS_FP}
+. $USHDIR/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -u -x; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that pulls NDAS observation data
+for verification purposes.
+========================================================================"
+
+#
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job and pass to it the necessary varia-
+# bles.
+#
+#-----------------------------------------------------------------------
+#
+$SCRIPTSDIR/exregional_get_ndas_files.sh || \
+print_err_msg_exit "\
+Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+#
+#-----------------------------------------------------------------------
+#
+# Print exit message.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
diff --git a/jobs/JREGIONAL_RUN_VX_GRIDSTAT b/jobs/JREGIONAL_RUN_VX_GRIDSTAT
new file mode 100755
index 000000000..ca29e2d93
--- /dev/null
+++ b/jobs/JREGIONAL_RUN_VX_GRIDSTAT
@@ -0,0 +1,128 @@
+#!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs METplus for grid-stat on the UPP output files by
+# initialization time for all forecast hours.
+#
+#-----------------------------------------------------------------------
+#
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${GLOBAL_VAR_DEFNS_FP}
+. $USHDIR/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -u -x; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs METplus for grid-stat
+by initialization time for all forecast hours.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# If it doesn't already exist, create the directory (vx_dir) in
+# which to store grid-stat output.
+#
+# Note that there may be a preexisting version of vx_dir from previous
+# runs of this script (e.g. from the workflow task that runs this script
+# failing and then being called again). Thus, we first make sure
+# preexisting versions are deleted.
+#
+#-----------------------------------------------------------------------
+#
+
+if [ "${RUN_ENVIR}" = "nco" ]; then
+ COMOUT="$COMROOT/$NET/$envir/$RUN.$PDY/$cyc"
+ postprd_dir="$COMOUT"
+else
+ postprd_dir="${CYCLE_DIR}/postprd"
+fi
+mkdir_vrfy -p "${postprd_dir}"
+
+if [ "${RUN_ENVIR}" = "nco" ]; then
+ COMOUT="$COMROOT/$NET/$envir/$RUN.$PDY/$cyc"
+ vx_dir="$COMOUT"
+else
+ vx_dir="${CYCLE_DIR}/metprd"
+fi
+mkdir_vrfy -p "${vx_dir}"
+
+gridstat_dir="${vx_dir}/grid_stat"
+#check_for_preexist_dir "${gridstat_dir}" "delete"
+mkdir_vrfy -p "${gridstat_dir}"
+
+cd_vrfy "${gridstat_dir}"
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job and pass to it the necessary varia-
+# bles.
+#
+#-----------------------------------------------------------------------
+#
+$SCRIPTSDIR/exregional_run_gridstatvx.sh \
+ cycle_dir="${CYCLE_DIR}" \
+ postprd_dir="${postprd_dir}" \
+ vx_dir="${vx_dir}" \
+ gridstat_dir="${gridstat_dir}" || \
+print_err_msg_exit "\
+Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+#
+#-----------------------------------------------------------------------
+#
+# Print exit message.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
diff --git a/jobs/JREGIONAL_RUN_VX_POINTSTAT b/jobs/JREGIONAL_RUN_VX_POINTSTAT
new file mode 100755
index 000000000..f5c35253a
--- /dev/null
+++ b/jobs/JREGIONAL_RUN_VX_POINTSTAT
@@ -0,0 +1,126 @@
+#!/bin/bash
+
+#
+#-----------------------------------------------------------------------
+#
+# This script runs METplus for point-stat on the UPP output files by
+#-----------------------------------------------------------------------
+#
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${GLOBAL_VAR_DEFNS_FP}
+. $USHDIR/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -u -x; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the J-job script for the task that runs METplus for point-stat
+by initialization time for all forecast hours.
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# If it doesn't already exist, create the directory (vx_dir) in
+# which to store point-stat output.
+#
+# Note that there may be a preexisting version of vx_dir from previous
+# runs of this script (e.g. from the workflow task that runs this script
+# failing and then being called again). Thus, we first make sure
+# preexisting versions are deleted.
+#
+#-----------------------------------------------------------------------
+#
+
+if [ "${RUN_ENVIR}" = "nco" ]; then
+ COMOUT="$COMROOT/$NET/$envir/$RUN.$PDY/$cyc"
+ postprd_dir="$COMOUT"
+else
+ postprd_dir="${CYCLE_DIR}/postprd"
+fi
+mkdir_vrfy -p "${postprd_dir}"
+
+if [ "${RUN_ENVIR}" = "nco" ]; then
+ COMOUT="$COMROOT/$NET/$envir/$RUN.$PDY/$cyc"
+ vx_dir="$COMOUT"
+else
+ vx_dir="${CYCLE_DIR}/metprd"
+fi
+mkdir_vrfy -p "${vx_dir}"
+
+pointstat_dir="${vx_dir}/point_stat"
+#check_for_preexist_dir "${pointstat_dir}" "delete"
+mkdir_vrfy -p "${pointstat_dir}"
+
+cd_vrfy "${pointstat_dir}"
+#
+#-----------------------------------------------------------------------
+#
+# Call the ex-script for this J-job and pass to it the necessary varia-
+# bles.
+#
+#-----------------------------------------------------------------------
+#
+$SCRIPTSDIR/exregional_run_pointstatvx.sh \
+ cycle_dir="${CYCLE_DIR}" \
+ postprd_dir="${postprd_dir}" \
+ vx_dir="${vx_dir}" \
+ pointstat_dir="${pointstat_dir}" || \
+print_err_msg_exit "\
+Call to ex-script corresponding to J-job \"${scrfunc_fn}\" failed."
+#
+#-----------------------------------------------------------------------
+#
+# Print exit message.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
+
diff --git a/modulefiles/tasks/hera/get_obs.local b/modulefiles/tasks/hera/get_obs.local
new file mode 100644
index 000000000..47c1c6f31
--- /dev/null
+++ b/modulefiles/tasks/hera/get_obs.local
@@ -0,0 +1,9 @@
+#%Module#####################################################
+## Module file for get_obs_ccpa, get_obs_mrms, and
+## get_obs_ndas task.
+#############################################################
+
+module purge
+
+module load hpss
+
diff --git a/modulefiles/tasks/hera/run_vx.local b/modulefiles/tasks/hera/run_vx.local
new file mode 100644
index 000000000..99c35f92e
--- /dev/null
+++ b/modulefiles/tasks/hera/run_vx.local
@@ -0,0 +1,6 @@
+#%Module
+module use -a /contrib/anaconda/modulefiles
+module load anaconda/latest
+module use -a /contrib/met/modulefiles/
+module load met/9.0.2
+
diff --git a/scripts/exregional_get_ccpa_files.sh b/scripts/exregional_get_ccpa_files.sh
new file mode 100755
index 000000000..2354938f1
--- /dev/null
+++ b/scripts/exregional_get_ccpa_files.sh
@@ -0,0 +1,218 @@
+#!/bin/sh
+
+# This script reorganizes the CCPA data into a more intuitive structure:
+# A valid YYYYMMDD directory is created, and all files for the valid day are placed within the directory.
+# Supported accumulations: 01h, 03h, and 06h. NOTE: Accumulation is currently hardcoded to 01h.
+# The verification uses MET/pcp-combine to sum 01h files into desired accumulations.
+
+# Top-level CCPA directory
+ccpa_dir=${OBS_DIR}/..
+if [[ ! -d "$ccpa_dir" ]]; then
+ mkdir -p $ccpa_dir
+fi
+
+# CCPA data from HPSS
+ccpa_raw=$ccpa_dir/raw
+if [[ ! -d "$ccpa_raw" ]]; then
+ mkdir -p $ccpa_raw
+fi
+
+# Reorganized CCPA location
+ccpa_proc=$ccpa_dir/proc
+if [[ ! -d "$ccpa_proc" ]]; then
+ mkdir -p $ccpa_proc
+fi
+
+# accum is the accumulation interval (in hours) of the CCPA data to pull (hardcoded to 01h, see note above).
+#accum=${ACCUM}
+accum=01
+
+# Initialization
+yyyymmdd=${CDATE:0:8}
+hh=${CDATE:8:2}
+cyc=$hh
+
+init=${CDATE}${hh}
+
+fhr_last=`echo ${FHR} | awk '{ print $NF }'`
+
+# Forecast length
+fcst_length=${fhr_last}
+
+current_fcst=$accum
+while [[ ${current_fcst} -le ${fcst_length} ]]; do
+ # Calculate valid date info
+ fcst_sec=`expr ${current_fcst} \* 3600` # convert forecast lead hour to seconds
+ yyyy=`echo ${init} | cut -c1-4` # year (YYYY) of initialization time
+ mm=`echo ${init} | cut -c5-6` # month (MM) of initialization time
+ dd=`echo ${init} | cut -c7-8` # day (DD) of initialization time
+ hh=`echo ${init} | cut -c9-10` # hour (HH) of initialization time
+ init_ut=`date -ud ''${yyyy}-${mm}-${dd}' UTC '${hh}':00:00' +%s` # convert initialization time to universal time
+ vdate_ut=`expr ${init_ut} + ${fcst_sec}` # calculate current forecast time in universal time
+ vdate=`date -ud '1970-01-01 UTC '${vdate_ut}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ vyyyymmdd=`echo ${vdate} | cut -c1-8` # forecast time (YYYYMMDD)
+ vyyyy=`echo ${vdate} | cut -c1-4` # year (YYYY) of valid time
+ vmm=`echo ${vdate} | cut -c5-6` # month (MM) of valid time
+ vdd=`echo ${vdate} | cut -c7-8` # day (DD) of valid time
+ vhh=`echo ${vdate} | cut -c9-10` # forecast hour (HH)
+
+ vhh_noZero=$(expr ${vhh} + 0)
+
+ # Calculate valid date - 1 day
+ vdate_ut_m1=`expr ${vdate_ut} - 86400`
+ vdate_m1=`date -ud '1970-01-01 UTC '${vdate_ut_m1}' seconds' +%Y%m%d%H`
+ vyyyymmdd_m1=`echo ${vdate_m1} | cut -c1-8`
+ vyyyy_m1=`echo ${vdate_m1} | cut -c1-4`
+ vmm_m1=`echo ${vdate_m1} | cut -c5-6`
+ vdd_m1=`echo ${vdate_m1} | cut -c7-8`
+ vhh_m1=`echo ${vdate_m1} | cut -c9-10`
+
+ # Calculate valid date + 1 day
+ vdate_ut_p1=`expr ${vdate_ut} + 86400`
+ vdate_p1=`date -ud '1970-01-01 UTC '${vdate_ut_p1}' seconds' +%Y%m%d%H`
+ vyyyymmdd_p1=`echo ${vdate_p1} | cut -c1-8`
+ vyyyy_p1=`echo ${vdate_p1} | cut -c1-4`
+ vmm_p1=`echo ${vdate_p1} | cut -c5-6`
+ vdd_p1=`echo ${vdate_p1} | cut -c7-8`
+ vhh_p1=`echo ${vdate_p1} | cut -c9-10`
+
+  # Create necessary raw and proc directories
+ if [[ ! -d "$ccpa_raw/${vyyyymmdd}" ]]; then
+ mkdir -p $ccpa_raw/${vyyyymmdd}
+ fi
+
+ if [[ ! -d "$ccpa_raw/${vyyyymmdd_m1}" ]]; then
+ mkdir -p $ccpa_raw/${vyyyymmdd_m1}
+ fi
+
+ if [[ ! -d "$ccpa_raw/${vyyyymmdd_p1}" ]]; then
+ mkdir -p $ccpa_raw/${vyyyymmdd_p1}
+ fi
+
+ if [[ ! -d "$ccpa_proc/${vyyyymmdd}" ]]; then
+ mkdir -p $ccpa_proc/${vyyyymmdd}
+ fi
+
+ # Name of CCPA tar file on HPSS is dependent on date. Logic accounts for files from 2019 until Sept. 2020.
+ if [[ ${vyyyymmdd} -ge 20190101 && ${vyyyymmdd} -lt 20190812 ]]; then
+ TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/com2_ccpa_prod_ccpa.${vyyyy}${vmm}${vdd}.tar"
+ fi
+
+ if [[ ${vyyyymmdd_m1} -ge 20190101 && ${vyyyymmdd_m1} -lt 20190812 ]]; then
+ TarFile_m1="/NCEPPROD/hpssprod/runhistory/rh${vyyyy_m1}/${vyyyy_m1}${vmm_m1}/${vyyyy_m1}${vmm_m1}${vdd_m1}/com2_ccpa_prod_ccpa.${vyyyy_m1}${vmm_m1}${vdd_m1}.tar"
+ fi
+
+ if [[ ${vyyyymmdd_p1} -ge 20190101 && ${vyyyymmdd_p1} -lt 20190812 ]]; then
+ TarFile_p1="/NCEPPROD/hpssprod/runhistory/rh${vyyyy_p1}/${vyyyy_p1}${vmm_p1}/${vyyyy_p1}${vmm_p1}${vdd_p1}/com2_ccpa_prod_ccpa.${vyyyy_p1}${vmm_p1}${vdd_p1}.tar"
+ fi
+
+ if [[ ${vyyyymmdd} -ge 20190812 && ${vyyyymmdd} -le 20200217 ]]; then
+ TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/gpfs_dell1_nco_ops_com_ccpa_prod_ccpa.${vyyyy}${vmm}${vdd}.tar"
+ fi
+
+ if [[ ${vyyyymmdd_m1} -ge 20190812 && ${vyyyymmdd_m1} -le 20200217 ]]; then
+ TarFile_m1="/NCEPPROD/hpssprod/runhistory/rh${vyyyy_m1}/${vyyyy_m1}${vmm_m1}/${vyyyy_m1}${vmm_m1}${vdd_m1}/gpfs_dell1_nco_ops_com_ccpa_prod_ccpa.${vyyyy_m1}${vmm_m1}${vdd_m1}.tar"
+ fi
+
+ if [[ ${vyyyymmdd_p1} -ge 20190812 && ${vyyyymmdd_p1} -le 20200217 ]]; then
+ TarFile_p1="/NCEPPROD/hpssprod/runhistory/rh${vyyyy_p1}/${vyyyy_p1}${vmm_p1}/${vyyyy_p1}${vmm_p1}${vdd_p1}/gpfs_dell1_nco_ops_com_ccpa_prod_ccpa.${vyyyy_p1}${vmm_p1}${vdd_p1}.tar"
+ fi
+
+ if [[ ${vyyyymmdd} -gt 20200217 ]]; then
+ TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/com_ccpa_prod_ccpa.${vyyyy}${vmm}${vdd}.tar"
+ fi
+
+ if [[ ${vyyyymmdd_m1} -gt 20200217 ]]; then
+ TarFile_m1="/NCEPPROD/hpssprod/runhistory/rh${vyyyy_m1}/${vyyyy_m1}${vmm_m1}/${vyyyy_m1}${vmm_m1}${vdd_m1}/com_ccpa_prod_ccpa.${vyyyy_m1}${vmm_m1}${vdd_m1}.tar"
+ fi
+
+ if [[ ${vyyyymmdd_p1} -gt 20200217 ]]; then
+ TarFile_p1="/NCEPPROD/hpssprod/runhistory/rh${vyyyy_p1}/${vyyyy_p1}${vmm_p1}/${vyyyy_p1}${vmm_p1}${vdd_p1}/com_ccpa_prod_ccpa.${vyyyy_p1}${vmm_p1}${vdd_p1}.tar"
+ fi
+
+ # Check if file exists on disk; if not, pull it.
+ ccpa_file="$ccpa_proc/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2"
+ echo "CCPA FILE:${ccpa_file}"
+ if [[ ! -f "${ccpa_file}" ]]; then
+ if [[ ${accum} == "01" ]]; then
+ # Check if valid hour is 00
+ if [[ ${vhh_noZero} -ge 19 && ${vhh_noZero} -le 23 ]]; then
+ cd $ccpa_raw/${vyyyymmdd_p1}
+ # Pull CCPA data from HPSS
+ TarCommand="htar -xvf ${TarFile_p1} \`htar -tf ${TarFile_p1} | egrep \"ccpa.t${vhh}z.${accum}h.hrap.conus.gb2\" | awk '{print $7}'\`"
+ echo "CALLING: ${TarCommand}"
+ htar -xvf ${TarFile_p1} `htar -tf ${TarFile_p1} | egrep "ccpa.t${vhh}z.${accum}h.hrap.conus.gb2" | awk '{print $7}'`
+ else
+ cd $ccpa_raw/${vyyyymmdd}
+ # Pull CCPA data from HPSS
+ TarCommand="htar -xvf ${TarFile} \`htar -tf ${TarFile} | egrep \"ccpa.t${vhh}z.${accum}h.hrap.conus.gb2\" | awk '{print $7}'\`"
+ echo "CALLING: ${TarCommand}"
+ htar -xvf ${TarFile} `htar -tf ${TarFile} | egrep "ccpa.t${vhh}z.${accum}h.hrap.conus.gb2" | awk '{print $7}'`
+ fi
+
+    # One hour CCPA files have incorrect metadata in the files under the "00" directory. After data is pulled, reorganize into correct valid yyyymmdd structure.
+ if [[ ${vhh_noZero} -ge 1 && ${vhh_noZero} -le 6 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/06/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -ge 7 && ${vhh_noZero} -le 12 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/12/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -ge 13 && ${vhh_noZero} -le 18 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/18/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -ge 19 && ${vhh_noZero} -le 23 ]]; then
+ wgrib2 $ccpa_raw/${vyyyymmdd_p1}/00/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -set_date -24hr -grib $ccpa_proc/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -s
+ elif [[ ${vhh_noZero} -eq 0 ]]; then
+ wgrib2 $ccpa_raw/${vyyyymmdd}/00/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -set_date -24hr -grib $ccpa_proc/${vyyyymmdd}/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 -s
+ fi
+
+ elif [[ ${accum} == "03" ]]; then
+ # Check if valid hour is 21
+ if [[ ${vhh_noZero} -ne 21 ]]; then
+ cd $ccpa_raw/${vyyyymmdd}
+ # Pull CCPA data from HPSS
+ TarCommand="htar -xvf ${TarFile} \`htar -tf ${TarFile} | egrep \"ccpa.t${vhh}z.${accum}h.hrap.conus.gb2\" | awk '{print $7}'\`"
+ echo "CALLING: ${TarCommand}"
+ htar -xvf ${TarFile} `htar -tf ${TarFile} | egrep "ccpa.t${vhh}z.${accum}h.hrap.conus.gb2" | awk '{print $7}'`
+ elif [[ ${vhh_noZero} -eq 21 ]]; then
+ cd $ccpa_raw/${vyyyymmdd_p1}
+ # Pull CCPA data from HPSS
+ TarCommand="htar -xvf ${TarFile_p1} \`htar -tf ${TarFile_p1} | egrep \"ccpa.t${vhh}z.${accum}h.hrap.conus.gb2\" | awk '{print $7}'\`"
+ echo "CALLING: ${TarCommand}"
+ htar -xvf ${TarFile_p1} `htar -tf ${TarFile_p1} | egrep "ccpa.t${vhh}z.${accum}h.hrap.conus.gb2" | awk '{print $7}'`
+ fi
+
+ if [[ ${vhh_noZero} -eq 0 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/00/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -eq 3 || ${vhh_noZero} -eq 6 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/06/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -eq 9 || ${vhh_noZero} -eq 12 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/12/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -eq 15 || ${vhh_noZero} -eq 18 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/18/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -eq 21 ]]; then
+ cp $ccpa_raw/${vyyyymmdd_p1}/00/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ fi
+
+ elif [[ ${accum} == "06" ]]; then
+ cd $ccpa_raw/${vyyyymmdd}
+ # Pull CCPA data from HPSS
+ TarCommand="htar -xvf ${TarFile} \`htar -tf ${TarFile} | egrep \"ccpa.t${vhh}z.${accum}h.hrap.conus.gb2\" | awk '{print $7}'\`"
+ echo "CALLING: ${TarCommand}"
+ htar -xvf ${TarFile} `htar -tf ${TarFile} | egrep "ccpa.t${vhh}z.${accum}h.hrap.conus.gb2" | awk '{print $7}'`
+
+ if [[ ${vhh_noZero} -eq 0 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/00/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -eq 6 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/06/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -eq 12 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/12/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ elif [[ ${vhh_noZero} -eq 18 ]]; then
+ cp $ccpa_raw/${vyyyymmdd}/18/ccpa.t${vhh}z.${accum}h.hrap.conus.gb2 $ccpa_proc/${vyyyymmdd}
+ fi
+ fi
+ fi
+
+ # Increment to next forecast hour
+ current_fcst=$((${current_fcst} + ${accum}))
+ echo "Current fcst hr=${current_fcst}"
+
+done
+
diff --git a/scripts/exregional_get_mrms_files.sh b/scripts/exregional_get_mrms_files.sh
new file mode 100755
index 000000000..01d95043d
--- /dev/null
+++ b/scripts/exregional_get_mrms_files.sh
@@ -0,0 +1,131 @@
+#!/bin/sh
+
+# This script pulls MRMS data from the NOAA HPSS
+# Top-level MRMS directory
+mrms_dir=${OBS_DIR}/..
+if [[ ! -d "$mrms_dir" ]]; then
+ mkdir -p $mrms_dir
+fi
+
+# MRMS data from HPSS
+mrms_raw=$mrms_dir/raw
+if [[ ! -d "$mrms_raw" ]]; then
+ mkdir -p $mrms_raw
+fi
+
+# Reorganized MRMS location
+mrms_proc=$mrms_dir/proc
+if [[ ! -d "$mrms_proc" ]]; then
+ mkdir -p $mrms_proc
+fi
+
+# Initialization
+yyyymmdd=${CDATE:0:8}
+hh=${CDATE:8:2}
+cyc=$hh
+
+start_valid=${CDATE}  # CDATE is already YYYYMMDDHH; appending ${hh} again duplicated the hour
+
+fhr_last=`echo ${FHR} | awk '{ print $NF }'`
+
+# Forecast length
+fcst_length=${fhr_last}
+
+s_yyyy=`echo ${start_valid} | cut -c1-4` # year (YYYY) of start time
+s_mm=`echo ${start_valid} | cut -c5-6` # month (MM) of start time
+s_dd=`echo ${start_valid} | cut -c7-8` # day (DD) of start time
+s_hh=`echo ${start_valid} | cut -c9-10` # hour (HH) of start time
+start_valid_ut=`date -ud ''${s_yyyy}-${s_mm}-${s_dd}' UTC '${s_hh}':00:00' +%s` # convert start time to universal time
+
+end_fcst_sec=`expr ${fcst_length} \* 3600` # convert last forecast lead hour to seconds
+end_valid_ut=`expr ${start_valid_ut} + ${end_fcst_sec}` # calculate current forecast time in universal time
+
+cur_ut=${start_valid_ut}
+current_fcst=0
+fcst_sec=`expr ${current_fcst} \* 3600` # convert forecast lead hour to seconds
+
+while [[ ${cur_ut} -le ${end_valid_ut} ]]; do
+ cur_time=`date -ud '1970-01-01 UTC '${cur_ut}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ echo "cur_time=${cur_time}"
+
+ # Calculate valid date info
+ vyyyy=`echo ${cur_time} | cut -c1-4` # year (YYYY) of time
+ vmm=`echo ${cur_time} | cut -c5-6` # month (MM) of time
+ vdd=`echo ${cur_time} | cut -c7-8` # day (DD) of time
+ vhh=`echo ${cur_time} | cut -c9-10` # hour (HH) of time
+ vyyyymmdd=`echo ${cur_time} | cut -c1-8` # YYYYMMDD of time
+ vinit_ut=`date -ud ''${vyyyy}-${vmm}-${vdd}' UTC '${vhh}':00:00' +%s` # convert time to universal time
+
+ # Create necessary raw and proc directories
+ if [[ ! -d "$mrms_raw/${vyyyymmdd}" ]]; then
+ mkdir -p $mrms_raw/${vyyyymmdd}
+ fi
+
+ # Check if file exists on disk; if not, pull it.
+ mrms_file="$mrms_proc/${vyyyymmdd}/MergedReflectivityQComposite_00.00_${vyyyy}${vmm}${vdd}-${vhh}0000.grib2"
+ echo "MRMS FILE:${mrms_file}"
+
+ if [[ ! -f "${mrms_file}" ]]; then
+ cd $mrms_raw/${vyyyymmdd}
+
+ # Name of MRMS tar file on HPSS is dependent on date. Logic accounts for files from 2019 until Sept. 2020.
+    if [[ ${vyyyymmdd} -ge 20190101 && ${vyyyymmdd} -lt 20200303 ]]; then
+      hsi "ls -1 /NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/ldmdata.gyre.${vyyyy}${vmm}${vdd}.tar" >& /dev/null
+      Status=$?
+      if [[ ${Status} == 0 ]]; then
+        TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/ldmdata.gyre.${vyyyy}${vmm}${vdd}.tar"
+      else
+        hsi "ls -1 /NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/ldmdata.tide.${vyyyy}${vmm}${vdd}.tar" >& /dev/null
+        Status=$?
+        if [[ ${Status} == 0 ]]; then
+          TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/ldmdata.tide.${vyyyy}${vmm}${vdd}.tar"
+        else
+          echo "ERROR: MRMS data not available for ${vyyyy}${vmm}${vdd}!"
+          exit 1
+        fi
+      fi
+    fi
+
+ if [[ ${vyyyymmdd} -ge 20200303 ]]; then
+ TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/dcom_prod_ldmdata_obs.tar"
+ fi
+
+ echo "TAR FILE:${TarFile}"
+
+    TarCommand="htar -xvf ${TarFile} \`htar -tf ${TarFile} | egrep \"MergedReflectivityQComposite_00.00_${vyyyy}${vmm}${vdd}-[0-9][0-9][0-9][0-9][0-9][0-9].grib2.gz\" | awk '{print \$7}'\`"
+    htar -xvf ${TarFile} `htar -tf ${TarFile} | egrep "MergedReflectivityQComposite_00.00_${vyyyy}${vmm}${vdd}-[0-9][0-9][0-9][0-9][0-9][0-9].grib2.gz" | awk '{print $7}'`
+    Status=$?
+
+    if [[ ${Status} != 0 ]]; then
+      echo "WARNING: Bad return status (${Status}) for date \"${vyyyymmdd}\". Did you forget to run \"module load hpss\"?"
+      echo "WARNING: ${TarCommand}"
+    else
+      if [[ ! -d "$mrms_proc/${vyyyymmdd}" ]]; then
+        mkdir -p $mrms_proc/${vyyyymmdd}
+      fi
+
+      hour=0
+      while [[ ${hour} -le 23 ]]; do
+        echo "hour=${hour}"
+        python ${SCRIPTSDIR}/mrms_pull_topofhour.py ${vyyyy}${vmm}${vdd}$(printf "%02d" ${hour}) ${mrms_proc} ${mrms_raw}
+        hour=$((${hour} + 1)) # hourly increment
+      done
+ fi
+
+ else
+ # Check if file exists on disk; if not, pull it.
+ mrms_file="$mrms_proc/${vyyyymmdd}/MergedReflectivityQComposite_00.00_${vyyyy}${vmm}${vdd}-${vhh}0000.grib2"
+
+ if [[ ! -f "${mrms_file}" ]]; then
+ cd $mrms_raw/${vyyyymmdd}
+
+ python ${SCRIPTSDIR}/mrms_pull_topofhour.py ${vyyyy}${vmm}${vdd}${vhh} ${mrms_proc} ${mrms_raw}
+ fi
+ fi
+
+ # Increment
+ current_fcst=$((${current_fcst} + 1)) # hourly increment
+ fcst_sec=`expr ${current_fcst} \* 3600` # convert forecast lead hour to seconds
+ cur_ut=`expr ${start_valid_ut} + ${fcst_sec}`
+
+done
diff --git a/scripts/exregional_get_ndas_files.sh b/scripts/exregional_get_ndas_files.sh
new file mode 100755
index 000000000..f79acf77f
--- /dev/null
+++ b/scripts/exregional_get_ndas_files.sh
@@ -0,0 +1,145 @@
+#!/bin/sh
+
+# This script reorganizes the NDAS data into a more intuitive structure:
+# A valid YYYYMMDD directory is created, and all files for the valid day are placed within the directory.
+
+# Top-level NDAS directory
+ndas_dir=${OBS_DIR}/..
+if [[ ! -d "$ndas_dir" ]]; then
+ mkdir -p $ndas_dir
+fi
+
+# NDAS data from HPSS
+ndas_raw=$ndas_dir/raw
+if [[ ! -d "$ndas_raw" ]]; then
+ mkdir -p $ndas_raw
+fi
+
+# Reorganized NDAS location
+ndas_proc=$ndas_dir/proc
+if [[ ! -d "$ndas_proc" ]]; then
+ mkdir -p $ndas_proc
+fi
+
+# Initialization
+yyyymmdd=${CDATE:0:8}
+hh=${CDATE:8:2}
+cyc=$hh
+
+init=${CDATE}  # CDATE is already YYYYMMDDHH; appending ${hh} again duplicated the hour
+
+# Forecast length
+fhr_last=`echo ${FHR} | awk '{ print $NF }'`
+
+fcst_length=${fhr_last}
+
+current_fcst=00
+while [[ ${current_fcst} -le ${fcst_length} ]]; do
+ fcst_sec=`expr ${current_fcst} \* 3600` # convert forecast lead hour to seconds
+ yyyy=`echo ${init} | cut -c1-4` # year (YYYY) of initialization time
+ mm=`echo ${init} | cut -c5-6` # month (MM) of initialization time
+ dd=`echo ${init} | cut -c7-8` # day (DD) of initialization time
+ hh=`echo ${init} | cut -c9-10` # hour (HH) of initialization time
+ init_ut=`date -ud ''${yyyy}-${mm}-${dd}' UTC '${hh}':00:00' +%s` # convert initialization time to universal time
+ vdate_ut=`expr ${init_ut} + ${fcst_sec}` # calculate current forecast time in universal time
+ vdate=`date -ud '1970-01-01 UTC '${vdate_ut}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ vyyyymmdd=`echo ${vdate} | cut -c1-8` # forecast time (YYYYMMDD)
+ vyyyy=`echo ${vdate} | cut -c1-4` # year (YYYY) of valid time
+ vmm=`echo ${vdate} | cut -c5-6` # month (MM) of valid time
+ vdd=`echo ${vdate} | cut -c7-8` # day (DD) of valid time
+ vhh=`echo ${vdate} | cut -c9-10` # forecast hour (HH)
+
+echo "yyyy mm dd hh= $yyyy $mm $dd $hh"
+echo "vyyyy vmm vdd vhh= $vyyyy $vmm $vdd $vhh"
+
+ vdate_ut_m1h=`expr ${vdate_ut} - 3600` # calculate current forecast time in universal time
+ vdate_m1h=`date -ud '1970-01-01 UTC '${vdate_ut_m1h}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ vyyyymmdd_m1h=`echo ${vdate_m1h} | cut -c1-8` # forecast time (YYYYMMDD)
+ vyyyy_m1h=`echo ${vdate_m1h} | cut -c1-4` # year (YYYY) of valid time
+ vmm_m1h=`echo ${vdate_m1h} | cut -c5-6` # month (MM) of valid time
+ vdd_m1h=`echo ${vdate_m1h} | cut -c7-8` # day (DD) of valid time
+ vhh_m1h=`echo ${vdate_m1h} | cut -c9-10` # forecast hour (HH)
+
+ vdate_ut_m2h=`expr ${vdate_ut} - 7200` # calculate current forecast time in universal time
+ vdate_m2h=`date -ud '1970-01-01 UTC '${vdate_ut_m2h}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ vyyyymmdd_m2h=`echo ${vdate_m2h} | cut -c1-8` # forecast time (YYYYMMDD)
+ vyyyy_m2h=`echo ${vdate_m2h} | cut -c1-4` # year (YYYY) of valid time
+ vmm_m2h=`echo ${vdate_m2h} | cut -c5-6` # month (MM) of valid time
+ vdd_m2h=`echo ${vdate_m2h} | cut -c7-8` # day (DD) of valid time
+ vhh_m2h=`echo ${vdate_m2h} | cut -c9-10` # forecast hour (HH)
+
+ vdate_ut_m3h=`expr ${vdate_ut} - 10800` # calculate current forecast time in universal time
+ vdate_m3h=`date -ud '1970-01-01 UTC '${vdate_ut_m3h}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ vyyyymmdd_m3h=`echo ${vdate_m3h} | cut -c1-8` # forecast time (YYYYMMDD)
+ vyyyy_m3h=`echo ${vdate_m3h} | cut -c1-4` # year (YYYY) of valid time
+ vmm_m3h=`echo ${vdate_m3h} | cut -c5-6` # month (MM) of valid time
+ vdd_m3h=`echo ${vdate_m3h} | cut -c7-8` # day (DD) of valid time
+ vhh_m3h=`echo ${vdate_m3h} | cut -c9-10` # forecast hour (HH)
+
+ vdate_ut_m4h=`expr ${vdate_ut} - 14400` # calculate current forecast time in universal time
+ vdate_m4h=`date -ud '1970-01-01 UTC '${vdate_ut_m4h}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ vyyyymmdd_m4h=`echo ${vdate_m4h} | cut -c1-8` # forecast time (YYYYMMDD)
+ vyyyy_m4h=`echo ${vdate_m4h} | cut -c1-4` # year (YYYY) of valid time
+ vmm_m4h=`echo ${vdate_m4h} | cut -c5-6` # month (MM) of valid time
+ vdd_m4h=`echo ${vdate_m4h} | cut -c7-8` # day (DD) of valid time
+ vhh_m4h=`echo ${vdate_m4h} | cut -c9-10` # forecast hour (HH)
+
+ vdate_ut_m5h=`expr ${vdate_ut} - 18000` # calculate current forecast time in universal time
+ vdate_m5h=`date -ud '1970-01-01 UTC '${vdate_ut_m5h}' seconds' +%Y%m%d%H` # convert universal time to standard time
+ vyyyymmdd_m5h=`echo ${vdate_m5h} | cut -c1-8` # forecast time (YYYYMMDD)
+ vyyyy_m5h=`echo ${vdate_m5h} | cut -c1-4` # year (YYYY) of valid time
+ vmm_m5h=`echo ${vdate_m5h} | cut -c5-6` # month (MM) of valid time
+ vdd_m5h=`echo ${vdate_m5h} | cut -c7-8` # day (DD) of valid time
+ vhh_m5h=`echo ${vdate_m5h} | cut -c9-10` # forecast hour (HH)
+
+ vhh_noZero=$(expr ${vhh} + 0)
+
+echo "vyyyymmdd_m1h vhh_m1h=$vyyyymmdd_m1h $vhh_m1h"
+echo "vhh_noZero=$vhh_noZero"
+
+ # Check if file exists on disk
+ ndas_file="$ndas_proc/prepbufr.ndas.${vyyyymmdd}${vhh}"
+ echo "NDAS PB FILE:${ndas_file}"
+
+ if [[ ! -f "${ndas_file}" ]]; then
+ if [[ ! -d "$ndas_raw/${vyyyymmdd}${vhh}" ]]; then
+ mkdir -p $ndas_raw/${vyyyymmdd}${vhh}
+ fi
+ cd $ndas_raw/${vyyyymmdd}${vhh}
+
+ # Name of NDAS tar file on HPSS is dependent on date. Logic accounts for files from 2019 until July 2020.
+    if [[ ${vyyyymmdd} -ge 20190101 && ${vyyyymmdd} -le 20190820 ]]; then
+      TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/com2_nam_prod_nam.${vyyyy}${vmm}${vdd}${vhh}.bufr.tar"
+      TarCommand="htar -xvf ${TarFile} \`htar -tf ${TarFile} | egrep \"prepbufr\.tm[0-9][0-9]\.nr\" | awk '{print \$7}'\`"
+      echo "CALLING: ${TarCommand}"
+      htar -xvf ${TarFile} `htar -tf ${TarFile} | egrep "prepbufr\.tm[0-9][0-9]\.nr" | awk '{print $7}'`
+    elif [[ ${vyyyymmdd} -ge 20190821 && ${vyyyymmdd} -le 20200226 ]]; then
+      TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/gpfs_dell1_nco_ops_com_nam_prod_nam.${vyyyy}${vmm}${vdd}${vhh}.bufr.tar"
+      TarCommand="htar -xvf ${TarFile} \`htar -tf ${TarFile} | egrep \"prepbufr\.tm[0-9][0-9]\.nr\" | awk '{print \$7}'\`"
+      echo "CALLING: ${TarCommand}"
+      htar -xvf ${TarFile} `htar -tf ${TarFile} | egrep "prepbufr\.tm[0-9][0-9]\.nr" | awk '{print $7}'`
+    else
+      TarFile="/NCEPPROD/hpssprod/runhistory/rh${vyyyy}/${vyyyy}${vmm}/${vyyyy}${vmm}${vdd}/com_nam_prod_nam.${vyyyy}${vmm}${vdd}${vhh}.bufr.tar"
+      TarCommand="htar -xvf ${TarFile} \`htar -tf ${TarFile} | egrep \"prepbufr\.tm[0-9][0-9]\.nr\" | awk '{print \$7}'\`"
+      echo "CALLING: ${TarCommand}"
+      htar -xvf ${TarFile} `htar -tf ${TarFile} | egrep "prepbufr\.tm[0-9][0-9]\.nr" | awk '{print $7}'`
+    fi
+
+    if [[ ! -d "$ndas_proc" ]]; then
+      mkdir -p $ndas_proc
+    fi
+
+    if [[ ${vhh_noZero} -eq 0 || ${vhh_noZero} -eq 6 || ${vhh_noZero} -eq 12 || ${vhh_noZero} -eq 18 ]]; then
+      #echo "$ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm00.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd}${vhh}"
+      cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm00.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd}${vhh}
+      cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm01.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m1h}${vhh_m1h}
+      cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm02.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m2h}${vhh_m2h}
+      cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm03.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m3h}${vhh_m3h}
+      cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm04.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m4h}${vhh_m4h}
+      cp $ndas_raw/${vyyyymmdd}${vhh}/nam.t${vhh}z.prepbufr.tm05.nr $ndas_proc/prepbufr.ndas.${vyyyymmdd_m5h}${vhh_m5h}
+    fi
+ fi
+ current_fcst=$((${current_fcst} + 6))
+ echo "new fcst=${current_fcst}"
+
+done
diff --git a/scripts/exregional_run_gridstatvx.sh b/scripts/exregional_run_gridstatvx.sh
new file mode 100755
index 000000000..ac0c525d5
--- /dev/null
+++ b/scripts/exregional_run_gridstatvx.sh
@@ -0,0 +1,177 @@
+#!/bin/sh -l
+set -x
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${GLOBAL_VAR_DEFNS_FP}
+. $USHDIR/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -u +x; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that runs METplus for grid-stat on
+the UPP output files by initialization time for all forecast hours.
+========================================================================"
+
+#
+#-----------------------------------------------------------------------
+#
+# Specify the set of valid argument names for this script/function.
+# Then process the arguments provided to this script/function (which
+# should consist of a set of name-value pairs of the form arg1="value1",
+# etc).
+#
+#-----------------------------------------------------------------------
+#
+valid_args=( "cycle_dir" "postprd_dir" "vx_dir" "gridstat_dir" )
+process_args valid_args "$@"
+#
+#-----------------------------------------------------------------------
+#
+# For debugging purposes, print out values of arguments passed to this
+# script. Note that these will be printed out only if VERBOSE is set to
+# TRUE.
+#
+#-----------------------------------------------------------------------
+#
+print_input_args valid_args
+
+#-----------------------------------------------------------------------
+#
+# Remove any files from previous runs and stage necessary files in gridstat_dir.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "$VERBOSE" "Starting grid-stat verification"
+
+cd ${gridstat_dir}
+
+#
+#-----------------------------------------------------------------------
+#
+# Get the cycle date and hour (in formats of yyyymmdd and hh, respect-
+# ively) from CDATE. Also read in FHR and create a comma-separated list
+# for METplus to run over.
+#
+#-----------------------------------------------------------------------
+#
+yyyymmdd=${CDATE:0:8}
+hh=${CDATE:8:2}
+cyc=$hh
+export CDATE
+export hh
+
+fhr_last=`echo ${FHR} | awk '{ print $NF }'`
+export fhr_last
+
+fhr_list=`echo ${FHR} | sed "s/ /,/g"`
+export fhr_list
+
+#
+#-----------------------------------------------------------------------
+#
+# Check for existence of top-level OBS_DIR
+#
+#-----------------------------------------------------------------------
+#
+if [[ ! -d "$OBS_DIR" ]]; then
+  print_err_msg_exit "\
+  Exiting: OBS_DIR does not exist."
+  # Note: print_err_msg_exit terminates the script; no separate exit needed.
+fi
+
+#
+#-----------------------------------------------------------------------
+#
+# Export some environment variables passed in by the XML
+#
+#-----------------------------------------------------------------------
+#
+export SCRIPTSDIR
+export EXPTDIR
+export MET_INSTALL_DIR
+export METPLUS_PATH
+export METPLUS_CONF
+export MET_CONFIG
+export OBS_DIR
+export VAR
+export MODEL
+export NET
+
+#
+#-----------------------------------------------------------------------
+#
+# Run METplus
+#
+#-----------------------------------------------------------------------
+#
+if [[ "${VAR}" == "APCP" ]]; then
+  export acc="${ACCUM}h" # for stats output prefix in GridStatConfig
+  ${METPLUS_PATH}/ush/master_metplus.py \
+  -c ${METPLUS_CONF}/common.conf \
+  -c ${METPLUS_CONF}/${VAR}_${acc}.conf
+elif [[ "${VAR}" == "REFC" ]]; then
+  ${METPLUS_PATH}/ush/master_metplus.py \
+  -c ${METPLUS_CONF}/common.conf \
+  -c ${METPLUS_CONF}/${VAR}.conf
+else
+  echo "No variable defined"
+fi
+
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating successful completion of script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+METplus grid-stat completed successfully.
+
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
diff --git a/scripts/exregional_run_pointstatvx.sh b/scripts/exregional_run_pointstatvx.sh
new file mode 100755
index 000000000..f06bb3c23
--- /dev/null
+++ b/scripts/exregional_run_pointstatvx.sh
@@ -0,0 +1,161 @@
+#!/bin/sh -l
+set -x
+
+#
+#-----------------------------------------------------------------------
+#
+# Source the variable definitions file and the bash utility functions.
+#
+#-----------------------------------------------------------------------
+#
+. ${GLOBAL_VAR_DEFNS_FP}
+. $USHDIR/source_util_funcs.sh
+#
+#-----------------------------------------------------------------------
+#
+# Save current shell options (in a global array). Then set new options
+# for this script/function.
+#
+#-----------------------------------------------------------------------
+#
+{ save_shell_opts; set -u +x; } > /dev/null 2>&1
+#
+#-----------------------------------------------------------------------
+#
+# Get the full path to the file in which this script/function is located
+# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in
+# which the file is located (scrfunc_dir).
+#
+#-----------------------------------------------------------------------
+#
+scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" )
+scrfunc_fn=$( basename "${scrfunc_fp}" )
+scrfunc_dir=$( dirname "${scrfunc_fp}" )
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating entry into script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+Entering script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+
+This is the ex-script for the task that runs METplus for point-stat on
+the UPP output files by initialization time for all forecast hours.
+========================================================================"
+
+#
+#-----------------------------------------------------------------------
+#
+# Specify the set of valid argument names for this script/function.
+# Then process the arguments provided to this script/function (which
+# should consist of a set of name-value pairs of the form arg1="value1",
+# etc).
+#
+#-----------------------------------------------------------------------
+#
+valid_args=( "cycle_dir" "postprd_dir" "vx_dir" "pointstat_dir" )
+process_args valid_args "$@"
+#
+#-----------------------------------------------------------------------
+#
+# For debugging purposes, print out values of arguments passed to this
+# script. Note that these will be printed out only if VERBOSE is set to
+# TRUE.
+#
+#-----------------------------------------------------------------------
+#
+print_input_args valid_args
+#-----------------------------------------------------------------------
+#
+# Remove any files from previous runs and stage necessary files in pointstat_dir.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "$VERBOSE" "Starting point-stat verification"
+
+cd ${pointstat_dir}
+
+#
+#-----------------------------------------------------------------------
+#
+# Get the cycle date and hour (in formats of yyyymmdd and hh, respect-
+# ively) from CDATE. Also read in FHR and create a comma-separated list
+# for METplus to run over.
+#
+#-----------------------------------------------------------------------
+#
+yyyymmdd=${CDATE:0:8}
+hh=${CDATE:8:2}
+cyc=$hh
+export CDATE
+export hh
+
+fhr_last=`echo ${FHR} | awk '{ print $NF }'`
+export fhr_last
+
+fhr_list=`echo ${FHR} | sed "s/ /,/g"`
+export fhr_list
+
+#
+#-----------------------------------------------------------------------
+#
+# Check for existence of top-level OBS_DIR
+#
+#-----------------------------------------------------------------------
+#
+if [[ ! -d "$OBS_DIR" ]]; then
+ print_err_msg_exit "\
+ Exiting: OBS_DIR does not exist."
+fi
+
+#
+#-----------------------------------------------------------------------
+#
+# Export some environment variables passed in by the XML and run METplus
+#
+#-----------------------------------------------------------------------
+#
+export EXPTDIR
+export MET_INSTALL_DIR
+export METPLUS_PATH
+export METPLUS_CONF
+export MET_CONFIG
+export OBS_DIR
+export MODEL
+export NET
+
+${METPLUS_PATH}/ush/master_metplus.py \
+ -c ${METPLUS_CONF}/common.conf \
+ -c ${METPLUS_CONF}/PointStat_conus_sfc.conf
+
+${METPLUS_PATH}/ush/master_metplus.py \
+ -c ${METPLUS_CONF}/common.conf \
+ -c ${METPLUS_CONF}/PointStat_upper_air.conf
+
+#
+#-----------------------------------------------------------------------
+#
+# Print message indicating successful completion of script.
+#
+#-----------------------------------------------------------------------
+#
+print_info_msg "
+========================================================================
+METplus point-stat completed successfully.
+
+Exiting script: \"${scrfunc_fn}\"
+In directory: \"${scrfunc_dir}\"
+========================================================================"
+#
+#-----------------------------------------------------------------------
+#
+# Restore the shell options saved at the beginning of this script/func-
+# tion.
+#
+#-----------------------------------------------------------------------
+#
+{ restore_shell_opts; } > /dev/null 2>&1
diff --git a/scripts/mrms_pull_topofhour.py b/scripts/mrms_pull_topofhour.py
new file mode 100644
index 000000000..072441643
--- /dev/null
+++ b/scripts/mrms_pull_topofhour.py
@@ -0,0 +1,103 @@
+import sys, os, shutil, subprocess
+import datetime
+import re, csv, glob
+import bisect
+import numpy as np
+
+
+# Copy and unzip MRMS files that are closest to top of hour
+# Done every hour on a 20-minute lag
+
+# Include option to define valid time on command line
+# Used to backfill verification
+#try:
+valid_time = str(sys.argv[1])
+
+YYYY = int(valid_time[0:4])
+MM = int(valid_time[4:6])
+DD = int(valid_time[6:8])
+HH = int(valid_time[8:10])  # hour slice fixed from [8:19], which only worked for <=10-char input
+
+valid = datetime.datetime(YYYY,MM,DD,HH,0,0)
+
+#except IndexError:
+# valid_time = None
+
+
+# Default to current hour if not defined on command line
+#if valid_time is None:
+# now = datetime.datetime.utcnow()
+# YYYY = int(now.strftime('%Y'))
+# MM = int(now.strftime('%m'))
+# DD = int(now.strftime('%d'))
+# HH = int(now.strftime('%H'))
+
+# valid = datetime.datetime(YYYY,MM,DD,HH,0,0)
+# valid_time = valid.strftime('%Y%m%d%H')
+
+
+print('Pulling '+valid_time+' MRMS data')
+
+
+
+
+# Set up working directory
+DATA_HEAD = str(sys.argv[2])
+MRMS_PROD_DIR = str(sys.argv[3])
+
+VALID_DIR = os.path.join(DATA_HEAD,valid.strftime('%Y%m%d'))
+if not os.path.exists(VALID_DIR):
+ os.makedirs(VALID_DIR)
+os.chdir(DATA_HEAD)
+
+
+
+# Copy and unzip the following MRMS products
+#MRMS_PRODUCTS = ['MergedReflectivityQCComposite','SeamlessHSR','EchoTop']
+MRMS_PRODUCTS = ['MergedReflectivityQComposite']
+
+for MRMS_PRODUCT in MRMS_PRODUCTS:
+
+ if MRMS_PRODUCT == 'MergedReflectivityQComposite':
+ level = '_00.00_'
+ #elif MRMS_PRODUCT == 'SeamlessHSR':
+ #level = '_00.00_'
+ #elif MRMS_PRODUCT == 'EchoTop':
+ #level = '_18_00.50_'
+
+ # Sort list of files for each MRMS product
+ print(valid.strftime('%Y%m%d'))
+ if valid.strftime('%Y%m%d') < '20200304':
+ search_path = MRMS_PROD_DIR+'/'+valid.strftime('%Y%m%d')+'/dcom/us007003/ldmdata/obs/upperair/mrms/conus/'+MRMS_PRODUCT+'/'+MRMS_PRODUCT+'*.gz'
+ elif valid.strftime('%Y%m%d') >= '20200304':
+ search_path = MRMS_PROD_DIR+'/'+valid.strftime('%Y%m%d')+'/upperair/mrms/conus/'+MRMS_PRODUCT+'/'+MRMS_PRODUCT+'*.gz'
+ file_list = [f for f in glob.glob(search_path)]
+ time_list = [file_list[x][-24:-9] for x in range(len(file_list))]
+ int_list = [int(time_list[x][0:8]+time_list[x][9:15]) for x in range(len(time_list))]
+ int_list.sort()
+ datetime_list = [datetime.datetime.strptime(str(x),"%Y%m%d%H%M%S") for x in int_list]
+
+ # Find the MRMS file closest to the valid time
+ i = bisect.bisect_left(datetime_list,valid)
+ closest_timestamp = min(datetime_list[max(0, i-1): i+2], key=lambda date: abs(valid - date))
+
+ # Check to make sure closest file is within +/- 15 mins of top of the hour
+ # Copy and rename the file for future ease
+ difference = abs(closest_timestamp - valid)
+ if difference.total_seconds() <= 900:
+ filename1 = MRMS_PRODUCT+level+closest_timestamp.strftime('%Y%m%d-%H%M%S')+'.grib2.gz'
+ filename2 = MRMS_PRODUCT+level+valid.strftime('%Y%m%d-%H')+'0000.grib2.gz'
+
+ if valid.strftime('%Y%m%d') < '20200304':
+ print('cp '+MRMS_PROD_DIR+'/'+valid.strftime('%Y%m%d')+'/dcom/us007003/ldmdata/obs/upperair/mrms/conus/'+MRMS_PRODUCT+'/'+filename1+' '+VALID_DIR+'/'+filename2)
+
+ os.system('cp '+MRMS_PROD_DIR+'/'+valid.strftime('%Y%m%d')+'/dcom/us007003/ldmdata/obs/upperair/mrms/conus/'+MRMS_PRODUCT+'/'+filename1+' '+VALID_DIR+'/'+filename2)
+ os.system('gunzip '+VALID_DIR+'/'+filename2)
+ elif valid.strftime('%Y%m%d') >= '20200304':
+ print('cp '+MRMS_PROD_DIR+'/'+valid.strftime('%Y%m%d')+'/upperair/mrms/conus/'+MRMS_PRODUCT+'/'+filename1+' '+VALID_DIR+'/'+filename2)
+
+ os.system('cp '+MRMS_PROD_DIR+'/'+valid.strftime('%Y%m%d')+'/upperair/mrms/conus/'+MRMS_PRODUCT+'/'+filename1+' '+VALID_DIR+'/'+filename2)
+ os.system('gunzip '+VALID_DIR+'/'+filename2)
+ else:
+ print('No '+MRMS_PRODUCT+' file found within 15 minutes of '+valid.strftime('%HZ %m/%d/%Y')+'. Skipping this time.')
+
diff --git a/ush/config.community.sh b/ush/config.community.sh
index 34560bee0..a1000ed90 100644
--- a/ush/config.community.sh
+++ b/ush/config.community.sh
@@ -26,6 +26,22 @@ FV3GFS_FILE_FMT_LBCS="grib2"
WTIME_RUN_FCST="01:00:00"
+MODEL="FV3_GFS_v15p2_CONUS_25km"
+METPLUS_PATH="path/to/METPlus"
+MET_INSTALL_DIR="path/to/MET"
+CCPA_OBS_DIR="/path/to/processed/CCPA/data"
+MRMS_OBS_DIR="/path/to/processed/MRMS/data"
+NDAS_OBS_DIR="/path/to/processed/NDAS/data"
+
+RUN_TASK_MAKE_GRID="TRUE"
+RUN_TASK_MAKE_OROG="TRUE"
+RUN_TASK_MAKE_SFC_CLIMO="TRUE"
+RUN_TASK_GET_OBS_CCPA="FALSE"
+RUN_TASK_GET_OBS_MRMS="FALSE"
+RUN_TASK_GET_OBS_NDAS="FALSE"
+RUN_TASK_VX_GRIDSTAT="FALSE"
+RUN_TASK_VX_POINTSTAT="FALSE"
+
#
# Uncomment the following line in order to use user-staged external model
# files with locations and names as specified by EXTRN_MDL_SOURCE_BASEDIR_ICS/
@@ -38,6 +54,6 @@ WTIME_RUN_FCST="01:00:00"
#
EXTRN_MDL_SOURCE_BASEDIR_ICS="/scratch2/BMC/det/UFS_SRW_app/v1p0/model_data/FV3GFS"
EXTRN_MDL_FILES_ICS=( "gfs.pgrb2.0p25.f000" )
-EXTRN_MDL_SOURCE_BASEDIR_LBCS="/scratch2/BMC/det/UFS_SRW_app/v1p0/model_data/FV3GFS"
+EXTRN_MDL_SOURCE_BASEDIR_LBCS="/path/to/model_data/FV3GFS"
EXTRN_MDL_FILES_LBCS=( "gfs.pgrb2.0p25.f006" "gfs.pgrb2.0p25.f012" "gfs.pgrb2.0p25.f018" "gfs.pgrb2.0p25.f024" \
"gfs.pgrb2.0p25.f030" "gfs.pgrb2.0p25.f036" "gfs.pgrb2.0p25.f042" "gfs.pgrb2.0p25.f048" )
diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh
index 44c115623..f0336561d 100644
--- a/ush/config_defaults.sh
+++ b/ush/config_defaults.sh
@@ -394,6 +394,104 @@ SUB_HOURLY_POST="FALSE"
DT_SUBHOURLY_POST_MNTS="00"
#-----------------------------------------------------------------------
#
+# Set METplus parameters. Definitions:
+#
+# MODEL:
+# String that specifies a descriptive name for the model being verified.
+#
+# MET_INSTALL_DIR:
+# Location to top-level directory of MET installation.
+#
+# METPLUS_PATH:
+# Location to top-level directory of METplus installation.
+#
+# CCPA_OBS_DIR:
+# User-specified location of top-level directory where CCPA hourly
+# precipitation files used by METplus are located. This parameter needs
+# to be set for both user-provided observations and for observations
+# that are retrieved from the NOAA HPSS (if the user has access) via
+# the get_obs_ccpa_tn task (activated in workflow by setting
+# RUN_TASK_GET_OBS_CCPA="TRUE"). In the case of pulling observations
+# directly from NOAA HPSS, the data retrieved will be placed in this
+# directory. Please note, this path must be defined as
+# /full-path-to-obs/ccpa/proc. METplus is configured to verify 01-,
+# 03-, 06-, and 24-h accumulated precipitation using hourly CCPA files.
+# METplus configuration files require the use of predetermined directory
+# structure and file names. Therefore, if the CCPA files are user
+# provided, they need to follow the anticipated naming structure:
+# {YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2, where YYYY is the 4-digit
+# valid year, MM the 2-digit valid month, DD the 2-digit valid day of
+# the month, and HH the 2-digit valid hour of the day. In addition, a
+# caveat is noted for using hourly CCPA data. There is a problem with
+# the valid time in the metadata for files valid from 19 - 00 UTC (or
+# files under the '00' directory). The script to pull the CCPA data
+# from the NOAA HPSS has an example of how to account for this as well
+# as organizing the data into a more intuitive format:
+# regional_workflow/scripts/exregional_get_ccpa_files.sh. When a fix
+# is provided, it will be accounted for in the
+# exregional_get_ccpa_files.sh script.
+#
+# MRMS_OBS_DIR:
+# User-specified location of top-level directory where MRMS composite
+# reflectivity files used by METplus are located. This parameter needs
+# to be set for both user-provided observations and for observations
+# that are retrieved from the NOAA HPSS (if the user has access) via the
+# get_obs_mrms_tn task (activated in workflow by setting
+# RUN_TASK_GET_OBS_MRMS="TRUE"). In the case of pulling observations
+# directly from NOAA HPSS, the data retrieved will be placed in this
+# directory. Please note, this path must be defined as
+# /full-path-to-obs/mrms/proc. METplus configuration files require the
+# use of predetermined directory structure and file names. Therefore, if
+# the MRMS files are user provided, they need to follow the anticipated
+# naming structure:
+# {YYYYMMDD}/MergedReflectivityQComposite_00.00_{YYYYMMDD}-{HH}{mm}{SS}.grib2,
+# where YYYY is the 4-digit valid year, MM the 2-digit valid month, DD
+# the 2-digit valid day of the month, HH the 2-digit valid hour of the
+# day, mm the 2-digit valid minutes of the hour, and SS the 2-digit
+# valid seconds of the minute. In addition, METplus is configured to look
+# for a MRMS composite reflectivity file for the valid time of the
+# forecast being verified; since MRMS composite reflectivity files do
+# not always exactly match the valid time, a script, within the main
+# script to retrieve MRMS data from the NOAA HPSS, is used to identify
+# and rename the MRMS composite reflectivity file to match the valid
+# time of the forecast. The script to pull the MRMS data from the NOAA
+# HPSS has an example of the expected file naming structure:
+# regional_workflow/scripts/exregional_get_mrms_files.sh. This script
+# calls the script used to identify the MRMS file closest to the valid
+# time: regional_workflow/scripts/mrms_pull_topofhour.py.
+#
+# NDAS_OBS_DIR:
+# User-specified location of top-level directory where NDAS prepbufr
+# files used by METplus are located. This parameter needs to be set for
+# both user-provided observations and for observations that are
+# retrieved from the NOAA HPSS (if the user has access) via the
+# get_obs_ndas_tn task (activated in workflow by setting
+# RUN_TASK_GET_OBS_NDAS="TRUE"). In the case of pulling observations
+# directly from NOAA HPSS, the data retrieved will be placed in this
+# directory. Please note, this path must be defined as
+# /full-path-to-obs/ndas/proc. METplus is configured to verify
+# near-surface variables hourly and upper-air variables at times valid
+# at 00 and 12 UTC with NDAS prepbufr files. METplus configuration files
+# require the use of predetermined file names. Therefore, if the NDAS
+# files are user provided, they need to follow the anticipated naming
+# structure: prepbufr.ndas.{YYYYMMDDHH}, where YYYY is the 4-digit valid
+# year, MM the 2-digit valid month, DD the 2-digit valid day of the
+# month, and HH the 2-digit valid hour of the day. The script to pull
+# the NDAS data from the NOAA HPSS has an example of how to rename the
+# NDAS data into a more intuitive format with the valid time listed in
+# the file name: regional_workflow/scripts/exregional_get_ndas_files.sh
+#
+#-----------------------------------------------------------------------
+#
+MODEL=""
+MET_INSTALL_DIR="/path/to/MET"
+METPLUS_PATH="/path/to/METPlus"
+CCPA_OBS_DIR="/path/to/observation-directory/ccpa/proc"
+MRMS_OBS_DIR="/path/to/observation-directory/mrms/proc"
+NDAS_OBS_DIR="/path/to/observation-directory/ndas/proc"
+#
+#-----------------------------------------------------------------------
+#
# Set initial and lateral boundary condition generation parameters.
# Definitions:
#
@@ -501,7 +599,6 @@ EXTRN_MDL_FILES_LBCS=( "LBCS_file1" "LBCS_file2" "..." )
# directory or the cycle directories under it.
#
#-----------------------------------------------------------------------
-#
CCPP_PHYS_SUITE="FV3_GFS_v15p2"
#
#-----------------------------------------------------------------------
@@ -1009,6 +1106,14 @@ VERBOSE="TRUE"
# SFC_CLIMO_DIR:
# Same as GRID_DIR but for the surface climatology generation task.
#
+# RUN_TASK_VX_GRIDSTAT:
+# Flag that determines whether the grid-stat verification task is to be
+# run.
+#
+# RUN_TASK_VX_POINTSTAT:
+# Flag that determines whether the point-stat verification task is to be
+# run.
+#
#-----------------------------------------------------------------------
#
RUN_TASK_MAKE_GRID="TRUE"
@@ -1019,6 +1124,17 @@ OROG_DIR="/path/to/pregenerated/orog/files"
RUN_TASK_MAKE_SFC_CLIMO="TRUE"
SFC_CLIMO_DIR="/path/to/pregenerated/surface/climo/files"
+
+RUN_TASK_GET_OBS_CCPA="FALSE"
+
+RUN_TASK_GET_OBS_MRMS="FALSE"
+
+RUN_TASK_GET_OBS_NDAS="FALSE"
+
+RUN_TASK_VX_GRIDSTAT="FALSE"
+
+RUN_TASK_VX_POINTSTAT="FALSE"
+
#
#-----------------------------------------------------------------------
#
@@ -1215,6 +1331,17 @@ MAKE_ICS_TN="make_ics"
MAKE_LBCS_TN="make_lbcs"
RUN_FCST_TN="run_fcst"
RUN_POST_TN="run_post"
+GET_OBS="get_obs"
+GET_OBS_CCPA_TN="get_obs_ccpa"
+GET_OBS_MRMS_TN="get_obs_mrms"
+GET_OBS_NDAS_TN="get_obs_ndas"
+VX_TN="run_vx"
+VX_GRIDSTAT_TN="run_gridstatvx"
+VX_GRIDSTAT_REFC_TN="run_gridstatvx_refc"
+VX_GRIDSTAT_03h_TN="run_gridstatvx_03h"
+VX_GRIDSTAT_06h_TN="run_gridstatvx_06h"
+VX_GRIDSTAT_24h_TN="run_gridstatvx_24h"
+VX_POINTSTAT_TN="run_pointstatvx"
#
# Number of nodes.
#
@@ -1227,6 +1354,11 @@ NNODES_MAKE_ICS="4"
NNODES_MAKE_LBCS="4"
NNODES_RUN_FCST="" # This is calculated in the workflow generation scripts, so no need to set here.
NNODES_RUN_POST="2"
+NNODES_GET_OBS_CCPA="1"
+NNODES_GET_OBS_MRMS="1"
+NNODES_GET_OBS_NDAS="1"
+NNODES_VX_GRIDSTAT="1"
+NNODES_VX_POINTSTAT="1"
#
# Number of MPI processes per node.
#
@@ -1239,6 +1371,11 @@ PPN_MAKE_ICS="12"
PPN_MAKE_LBCS="12"
PPN_RUN_FCST="24" # This may have to be changed depending on the number of threads used.
PPN_RUN_POST="24"
+PPN_GET_OBS_CCPA="1"
+PPN_GET_OBS_MRMS="1"
+PPN_GET_OBS_NDAS="1"
+PPN_VX_GRIDSTAT="1"
+PPN_VX_POINTSTAT="1"
#
# Walltimes.
#
@@ -1251,6 +1388,11 @@ WTIME_MAKE_ICS="00:30:00"
WTIME_MAKE_LBCS="00:30:00"
WTIME_RUN_FCST="04:30:00"
WTIME_RUN_POST="00:15:00"
+WTIME_GET_OBS_CCPA="00:45:00"
+WTIME_GET_OBS_MRMS="00:45:00"
+WTIME_GET_OBS_NDAS="02:00:00"
+WTIME_VX_GRIDSTAT="02:00:00"
+WTIME_VX_POINTSTAT="01:00:00"
#
# Maximum number of attempts.
#
@@ -1263,6 +1405,15 @@ MAXTRIES_MAKE_ICS="1"
MAXTRIES_MAKE_LBCS="1"
MAXTRIES_RUN_FCST="1"
MAXTRIES_RUN_POST="1"
+MAXTRIES_GET_OBS_CCPA="1"
+MAXTRIES_GET_OBS_MRMS="1"
+MAXTRIES_GET_OBS_NDAS="1"
+MAXTRIES_VX_GRIDSTAT="1"
+MAXTRIES_VX_GRIDSTAT_REFC="1"
+MAXTRIES_VX_GRIDSTAT_03h="1"
+MAXTRIES_VX_GRIDSTAT_06h="1"
+MAXTRIES_VX_GRIDSTAT_24h="1"
+MAXTRIES_VX_POINTSTAT="1"
#
#-----------------------------------------------------------------------
#
diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh
index 15a69392d..84c9c1fa7 100755
--- a/ush/generate_FV3LAM_wflow.sh
+++ b/ush/generate_FV3LAM_wflow.sh
@@ -184,6 +184,20 @@ settings="\
'make_lbcs_tn': ${MAKE_LBCS_TN}
'run_fcst_tn': ${RUN_FCST_TN}
'run_post_tn': ${RUN_POST_TN}
+ 'get_obs_ccpa_tn': ${GET_OBS_CCPA_TN}
+ 'get_obs_ndas_tn': ${GET_OBS_NDAS_TN}
+ 'get_obs_mrms_tn': ${GET_OBS_MRMS_TN}
+ 'vx_tn': ${VX_TN}
+ 'vx_gridstat_tn': ${VX_GRIDSTAT_TN}
+ 'vx_gridstat_refc_tn': ${VX_GRIDSTAT_REFC_TN}
+ 'vx_gridstat_03h_tn': ${VX_GRIDSTAT_03h_TN}
+ 'vx_gridstat_06h_tn': ${VX_GRIDSTAT_06h_TN}
+ 'vx_gridstat_24h_tn': ${VX_GRIDSTAT_24h_TN}
+ 'vx_pointstat_tn': ${VX_POINTSTAT_TN}
+#
+# Entity used to load the module file for each GET_OBS_* task.
+#
+ 'get_obs': ${GET_OBS}
#
# Number of nodes to use for each task.
#
@@ -196,6 +210,11 @@ settings="\
'nnodes_make_lbcs': ${NNODES_MAKE_LBCS}
'nnodes_run_fcst': ${NNODES_RUN_FCST}
'nnodes_run_post': ${NNODES_RUN_POST}
+ 'nnodes_get_obs_ccpa': ${NNODES_GET_OBS_CCPA}
+ 'nnodes_get_obs_mrms': ${NNODES_GET_OBS_MRMS}
+ 'nnodes_get_obs_ndas': ${NNODES_GET_OBS_NDAS}
+ 'nnodes_vx_gridstat': ${NNODES_VX_GRIDSTAT}
+ 'nnodes_vx_pointstat': ${NNODES_VX_POINTSTAT}
#
# Number of cores used for a task
#
@@ -214,6 +233,12 @@ settings="\
'ppn_make_lbcs': ${PPN_MAKE_LBCS}
'ppn_run_fcst': ${PPN_RUN_FCST}
'ppn_run_post': ${PPN_RUN_POST}
+ 'ppn_get_obs_ccpa': ${PPN_GET_OBS_CCPA}
+ 'ppn_get_obs_mrms': ${PPN_GET_OBS_MRMS}
+ 'ppn_get_obs_ndas': ${PPN_GET_OBS_NDAS}
+ 'ppn_vx_gridstat': ${PPN_VX_GRIDSTAT}
+ 'ppn_vx_pointstat': ${PPN_VX_POINTSTAT}
+
#
# Maximum wallclock time for each task.
#
@@ -226,6 +251,11 @@ settings="\
'wtime_make_lbcs': ${WTIME_MAKE_LBCS}
'wtime_run_fcst': ${WTIME_RUN_FCST}
'wtime_run_post': ${WTIME_RUN_POST}
+ 'wtime_get_obs_ccpa': ${WTIME_GET_OBS_CCPA}
+ 'wtime_get_obs_mrms': ${WTIME_GET_OBS_MRMS}
+ 'wtime_get_obs_ndas': ${WTIME_GET_OBS_NDAS}
+ 'wtime_vx_gridstat': ${WTIME_VX_GRIDSTAT}
+ 'wtime_vx_pointstat': ${WTIME_VX_POINTSTAT}
#
# Maximum number of tries for each task.
#
@@ -238,12 +268,26 @@ settings="\
'maxtries_make_lbcs': ${MAXTRIES_MAKE_LBCS}
'maxtries_run_fcst': ${MAXTRIES_RUN_FCST}
'maxtries_run_post': ${MAXTRIES_RUN_POST}
+ 'maxtries_get_obs_ccpa': ${MAXTRIES_GET_OBS_CCPA}
+ 'maxtries_get_obs_mrms': ${MAXTRIES_GET_OBS_MRMS}
+ 'maxtries_get_obs_ndas': ${MAXTRIES_GET_OBS_NDAS}
+ 'maxtries_vx_gridstat': ${MAXTRIES_VX_GRIDSTAT}
+ 'maxtries_vx_gridstat_refc': ${MAXTRIES_VX_GRIDSTAT_REFC}
+ 'maxtries_vx_gridstat_03h': ${MAXTRIES_VX_GRIDSTAT_03h}
+ 'maxtries_vx_gridstat_06h': ${MAXTRIES_VX_GRIDSTAT_06h}
+ 'maxtries_vx_gridstat_24h': ${MAXTRIES_VX_GRIDSTAT_24h}
+ 'maxtries_vx_pointstat': ${MAXTRIES_VX_POINTSTAT}
#
# Flags that specify whether to run the preprocessing tasks.
#
'run_task_make_grid': ${RUN_TASK_MAKE_GRID}
'run_task_make_orog': ${RUN_TASK_MAKE_OROG}
'run_task_make_sfc_climo': ${RUN_TASK_MAKE_SFC_CLIMO}
+ 'run_task_get_obs_ccpa': ${RUN_TASK_GET_OBS_CCPA}
+ 'run_task_get_obs_mrms': ${RUN_TASK_GET_OBS_MRMS}
+ 'run_task_get_obs_ndas': ${RUN_TASK_GET_OBS_NDAS}
+ 'run_task_vx_gridstat': ${RUN_TASK_VX_GRIDSTAT}
+ 'run_task_vx_pointstat': ${RUN_TASK_VX_POINTSTAT}
#
# Number of physical cores per node for the current machine.
#
@@ -253,6 +297,7 @@ settings="\
#
'jobsdir': $JOBSDIR
'logdir': $LOGDIR
+ 'scriptsdir': $SCRIPTSDIR
'cycle_basedir': ${CYCLE_BASEDIR}
'global_var_defns_fp': ${GLOBAL_VAR_DEFNS_FP}
'load_modules_run_task_fp': ${LOAD_MODULES_RUN_TASK_FP}
@@ -274,6 +319,18 @@ settings="\
#
'fcst_len_hrs': ${FCST_LEN_HRS}
#
+# METPlus-specific information
+#
+ 'model': ${MODEL}
+ 'met_install_dir': ${MET_INSTALL_DIR}
+ 'metplus_path': ${METPLUS_PATH}
+ 'vx_config_dir': ${VX_CONFIG_DIR}
+ 'metplus_conf': ${METPLUS_CONF}
+ 'met_config': ${MET_CONFIG}
+ 'ccpa_obs_dir': ${CCPA_OBS_DIR}
+ 'mrms_obs_dir': ${MRMS_OBS_DIR}
+ 'ndas_obs_dir': ${NDAS_OBS_DIR}
+#
# Ensemble-related parameters.
#
'do_ensemble': ${DO_ENSEMBLE}
diff --git a/ush/setup.sh b/ush/setup.sh
index 92b57cc31..484c62ddc 100755
--- a/ush/setup.sh
+++ b/ush/setup.sh
@@ -232,6 +232,46 @@ fi
#
#-----------------------------------------------------------------------
#
+# Make sure that RUN_TASK_VX_GRIDSTAT is set to a valid value.
+#
+#-----------------------------------------------------------------------
+#
+check_var_valid_value "RUN_TASK_VX_GRIDSTAT" "valid_vals_RUN_TASK_VX_GRIDSTAT"
+#
+# Set RUN_TASK_VX_GRIDSTAT to either "TRUE" or "FALSE" so we don't have to
+# consider other valid values later on.
+#
+RUN_TASK_VX_GRIDSTAT=${RUN_TASK_VX_GRIDSTAT^^}
+if [ "${RUN_TASK_VX_GRIDSTAT}" = "TRUE" ] || \
+ [ "${RUN_TASK_VX_GRIDSTAT}" = "YES" ]; then
+ RUN_TASK_VX_GRIDSTAT="TRUE"
+elif [ "${RUN_TASK_VX_GRIDSTAT}" = "FALSE" ] || \
+ [ "${RUN_TASK_VX_GRIDSTAT}" = "NO" ]; then
+ RUN_TASK_VX_GRIDSTAT="FALSE"
+fi
+#
+#-----------------------------------------------------------------------
+#
+# Make sure that RUN_TASK_VX_POINTSTAT is set to a valid value.
+#
+#-----------------------------------------------------------------------
+#
+check_var_valid_value "RUN_TASK_VX_POINTSTAT" "valid_vals_RUN_TASK_VX_POINTSTAT"
+#
+# Set RUN_TASK_VX_POINTSTAT to either "TRUE" or "FALSE" so we don't have to
+# consider other valid values later on.
+#
+RUN_TASK_VX_POINTSTAT=${RUN_TASK_VX_POINTSTAT^^}
+if [ "${RUN_TASK_VX_POINTSTAT}" = "TRUE" ] || \
+ [ "${RUN_TASK_VX_POINTSTAT}" = "YES" ]; then
+ RUN_TASK_VX_POINTSTAT="TRUE"
+elif [ "${RUN_TASK_VX_POINTSTAT}" = "FALSE" ] || \
+ [ "${RUN_TASK_VX_POINTSTAT}" = "NO" ]; then
+ RUN_TASK_VX_POINTSTAT="FALSE"
+fi
+#
+#-----------------------------------------------------------------------
+#
# Make sure that DO_SHUM is set to a valid value.
#
#-----------------------------------------------------------------------
@@ -732,7 +772,11 @@ SRC_DIR="${SR_WX_APP_TOP_DIR}/src"
PARMDIR="$HOMErrfs/parm"
MODULES_DIR="$HOMErrfs/modulefiles"
EXECDIR="${SR_WX_APP_TOP_DIR}/bin"
+FIXrrfs="$HOMErrfs/fix"
TEMPLATE_DIR="$USHDIR/templates"
+VX_CONFIG_DIR="$TEMPLATE_DIR/parm"
+METPLUS_CONF="$TEMPLATE_DIR/parm/metplus"
+MET_CONFIG="$TEMPLATE_DIR/parm/met"
case $MACHINE in
@@ -1661,6 +1705,47 @@ one above. Reset values are:
print_info_msg "$msg"
fi
+
+ if [ "${RUN_TASK_VX_GRIDSTAT}" = "TRUE" ] || \
+ [ "${RUN_TASK_VX_GRIDSTAT}" = "FALSE" ]; then
+
+ msg="
+When RUN_ENVIR is set to \"nco\", it is assumed that the verification
+will not be run.
+ RUN_TASK_VX_GRIDSTAT = \"${RUN_TASK_VX_GRIDSTAT}\"
+Resetting RUN_TASK_VX_GRIDSTAT to \"FALSE\"
+Reset value is:"
+
+ RUN_TASK_VX_GRIDSTAT="FALSE"
+
+ msg="$msg""
+ RUN_TASK_VX_GRIDSTAT = \"${RUN_TASK_VX_GRIDSTAT}\"
+"
+
+ print_info_msg "$msg"
+
+ fi
+
+ if [ "${RUN_TASK_VX_POINTSTAT}" = "TRUE" ] || \
+ [ "${RUN_TASK_VX_POINTSTAT}" = "FALSE" ]; then
+
+ msg="
+When RUN_ENVIR is set to \"nco\", it is assumed that the verification
+will not be run.
+ RUN_TASK_VX_POINTSTAT = \"${RUN_TASK_VX_POINTSTAT}\"
+Resetting RUN_TASK_VX_POINTSTAT to \"FALSE\"
+Reset value is:"
+
+ RUN_TASK_VX_POINTSTAT="FALSE"
+
+ msg="$msg""
+ RUN_TASK_VX_POINTSTAT = \"${RUN_TASK_VX_POINTSTAT}\"
+"
+
+ print_info_msg "$msg"
+
+ fi
+
#
#-----------------------------------------------------------------------
#
@@ -2492,12 +2577,16 @@ SRC_DIR="$SRC_DIR"
PARMDIR="$PARMDIR"
MODULES_DIR="${MODULES_DIR}"
EXECDIR="$EXECDIR"
+FIXrrfs="$FIXrrfs"
FIXam="$FIXam"
FIXLAM="$FIXLAM"
FIXgsm="$FIXgsm"
COMROOT="$COMROOT"
COMOUT_BASEDIR="${COMOUT_BASEDIR}"
TEMPLATE_DIR="${TEMPLATE_DIR}"
+VX_CONFIG_DIR="${VX_CONFIG_DIR}"
+METPLUS_CONF="${METPLUS_CONF}"
+MET_CONFIG="${MET_CONFIG}"
UFS_WTHR_MDL_DIR="${UFS_WTHR_MDL_DIR}"
UFS_UTILS_DIR="${UFS_UTILS_DIR}"
SFC_CLIMO_INPUT_DIR="${SFC_CLIMO_INPUT_DIR}"
diff --git a/ush/templates/FV3.input.yml b/ush/templates/FV3.input.yml
index c1f215481..09e52ca6c 100644
--- a/ush/templates/FV3.input.yml
+++ b/ush/templates/FV3.input.yml
@@ -265,7 +265,6 @@ FV3_GFS_v15p2:
fhcyc: 0.0
fhlwr: 3600.0
fhswr: 3600.0
- fhzero: 6.0
hybedmf: True
iau_delthrs: !!python/none
iaufhrs: !!python/none
@@ -418,7 +417,6 @@ FV3_CPT_v0:
fhcyc: 0.0
fhlwr: 3600.0
fhswr: 3600.0
- fhzero: 6.0
fprcp: 2
hybedmf: True
iccn: False
diff --git a/ush/templates/FV3LAM_wflow.xml b/ush/templates/FV3LAM_wflow.xml
index b42380e72..e15f3bfef 100644
--- a/ush/templates/FV3LAM_wflow.xml
+++ b/ush/templates/FV3LAM_wflow.xml
@@ -22,18 +22,33 @@ Parameters needed by the job scheduler.
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
@@ -52,10 +67,19 @@ Directories and files.
-->
+
+
+
+
+
+
+
+
+
+
+
+
+ &RSRV_HPSS;
+ &LOAD_MODULES_RUN_TASK_FP; "&GET_OBS;" "&JOBSDIR;/JREGIONAL_GET_OBS_CCPA"
+ {{ nnodes_get_obs_ccpa }}:ppn={{ ppn_get_obs_ccpa }}
+ {{ wtime_get_obs_ccpa }}
+ &NCORES_PER_NODE;
+ &GET_OBS_CCPA_TN;
+ &LOGDIR;/&GET_OBS_CCPA_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&CCPA_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %}
+ ACCUM01
+
+
+
+
+
+
+{% endif %}
+
+{% if run_task_get_obs_mrms %}
+
+
+
+
+ &RSRV_HPSS;
+ &LOAD_MODULES_RUN_TASK_FP; "&GET_OBS;" "&JOBSDIR;/JREGIONAL_GET_OBS_MRMS"
+ {{ nnodes_get_obs_mrms }}:ppn={{ ppn_get_obs_mrms }}
+ {{ wtime_get_obs_mrms }}
+ &NCORES_PER_NODE;
+ &GET_OBS_MRMS_TN;
+ &LOGDIR;/&GET_OBS_MRMS_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&MRMS_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %}
+ SCRIPTSDIR&SCRIPTSDIR;
+ VARREFC
+
+
+
+
+
+
+{% endif %}
+
+{% if run_task_get_obs_ndas %}
+
+
+
+
+ &RSRV_HPSS;
+ &LOAD_MODULES_RUN_TASK_FP; "&GET_OBS;" "&JOBSDIR;/JREGIONAL_GET_OBS_NDAS"
+ {{ nnodes_get_obs_ndas }}:ppn={{ ppn_get_obs_ndas }}
+ {{ wtime_get_obs_ndas }}
+ &NCORES_PER_NODE;
+ &GET_OBS_NDAS_TN;
+ &LOGDIR;/&GET_OBS_NDAS_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&NDAS_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %}
+ VARREFC
+
+
+
+
+
+
+{% endif %}
+
+{% if run_task_vx_gridstat %}
+
+
+
+
+ &RSRV_HPSS;
+ &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT"
+ {{ nnodes_vx_gridstat }}:ppn={{ ppn_vx_gridstat }}
+ {{ wtime_vx_gridstat }}
+ &NCORES_PER_NODE;
+ &VX_GRIDSTAT_TN;
+ &LOGDIR;/&VX_GRIDSTAT_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&CCPA_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %}
+ VARAPCP
+ ACCUM01
+
+
+ {% if run_task_get_obs_ccpa %}
+
+ {% else %}
+
+ {% endif %}
+
+
+
+{% endif %}
+
+{% if run_task_vx_gridstat %}
+
+
+
+ &RSRV_HPSS;
+ &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT"
+ {{ nnodes_vx_gridstat }}:ppn={{ ppn_vx_gridstat }}
+ {{ wtime_vx_gridstat }}
+ &NCORES_PER_NODE;
+ &VX_GRIDSTAT_REFC_TN;
+ &LOGDIR;/&VX_GRIDSTAT_REFC_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&MRMS_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(1, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %}
+ VARREFC
+
+
+ {% if run_task_get_obs_mrms %}
+
+ {% else %}
+
+ {% endif %}
+
+
+
+{% endif %}
+
+{% if run_task_vx_gridstat %}
+
+
+
+ &RSRV_DEFAULT;
+ &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT"
+ {{ nnodes_vx_gridstat }}:ppn={{ ppn_vx_gridstat }}
+ {{ wtime_vx_gridstat }}
+ &NCORES_PER_NODE;
+ &VX_GRIDSTAT_03h_TN;
+ &LOGDIR;/&VX_GRIDSTAT_03h_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&CCPA_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(3, fcst_len_hrs+1, 3) %}{{ " %02d" % h }}{% endfor %}
+ VARAPCP
+ ACCUM03
+
+
+
+
+
+
+{% endif %}
+
+{% if run_task_vx_gridstat %}
+
+
+
+ &RSRV_DEFAULT;
+ &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT"
+ {{ nnodes_vx_gridstat }}:ppn={{ ppn_vx_gridstat }}
+ {{ wtime_vx_gridstat }}
+ &NCORES_PER_NODE;
+ &VX_GRIDSTAT_06h_TN;
+ &LOGDIR;/&VX_GRIDSTAT_06h_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&CCPA_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(6, fcst_len_hrs+1, 6) %}{{ " %02d" % h }}{% endfor %}
+ VARAPCP
+ ACCUM06
+
+
+
+
+
+
+{% endif %}
+
+{% if run_task_vx_gridstat %}
+
+
+
+ &RSRV_DEFAULT;
+ &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_GRIDSTAT"
+ {{ nnodes_vx_gridstat }}:ppn={{ ppn_vx_gridstat }}
+ {{ wtime_vx_gridstat }}
+ &NCORES_PER_NODE;
+ &VX_GRIDSTAT_24h_TN;
+ &LOGDIR;/&VX_GRIDSTAT_24h_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&CCPA_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(24, fcst_len_hrs+1, 24) %}{{ " %02d" % h }}{% endfor %}
+ VARAPCP
+ ACCUM24
+
+
+
+
+
+
+{% endif %}
+
+{% if run_task_vx_pointstat %}
+
+
+
+ &RSRV_HPSS;
+
+ &LOAD_MODULES_RUN_TASK_FP; "&VX_TN;" "&JOBSDIR;/JREGIONAL_RUN_VX_POINTSTAT"
+ {{ nnodes_vx_pointstat }}:ppn={{ ppn_vx_pointstat }}
+ {{ wtime_vx_pointstat }}
+ &NCORES_PER_NODE;
+ &VX_POINTSTAT_TN;
+ &LOGDIR;/&VX_POINTSTAT_TN;_@Y@m@d@H.log
+
+ GLOBAL_VAR_DEFNS_FP&GLOBAL_VAR_DEFNS_FP;
+ OBS_DIR&NDAS_OBS_DIR;
+ CYCLE_DIR&CYCLE_BASEDIR;/@Y@m@d@H
+ CDATE@Y@m@d@H
+ PDY@Y@m@d
+ cyc@H
+ FHR {% for h in range(0, fcst_len_hrs+1) %}{{ " %02d" % h }}{% endfor %}
+
+
+ {% if run_task_get_obs_ndas %}
+
+ {% else %}
+
+ {% endif %}
+
+
+
+
+{% endif %}
{% if do_ensemble %}
{% endif %}
-
diff --git a/ush/templates/parm/met/GridStatConfig_APCP b/ush/templates/parm/met/GridStatConfig_APCP
new file mode 100755
index 000000000..55fbd3e97
--- /dev/null
+++ b/ush/templates/parm/met/GridStatConfig_APCP
@@ -0,0 +1,178 @@
+////////////////////////////////////////////////////////////////////////////////
+//
+// Grid-Stat configuration file.
+//
+// For additional information, see the MET_BASE/config/README file.
+//
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Output model name to be written
+//
+model = "${MODEL}";
+
+//
+// Output description to be written
+// May be set separately in each "obs.field" entry
+//
+desc = "NA";
+
+//
+// Output observation type to be written
+//
+obtype = "${OBTYPE}";
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification grid
+//
+regrid = {
+ to_grid = ${REGRID_TO_GRID};
+ vld_thresh = 0.5;
+ method = BUDGET;
+ width = 2;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+cat_thresh = [ NA ];
+cnt_thresh = [ NA ];
+cnt_logic = UNION;
+wind_thresh = [ NA ];
+wind_logic = UNION;
+
+//
+// Forecast and observation fields to be verified
+//
+fcst = {
+ field = [ ${FCST_FIELD} ];
+}
+
+obs = {
+ field = [ ${OBS_FIELD} ];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Climatology mean data
+//
+climo_mean = {
+
+ file_name = [];
+ field = [];
+
+ regrid = {
+ vld_thresh = 0.5;
+ method = NEAREST;
+ width = 1;
+ }
+
+ time_interp_method = DW_MEAN;
+ match_day = FALSE;
+ time_step = 21600;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification masking regions
+//
+mask = {
+ grid = [];
+ poly = [ ${VERIF_MASK} ];
+ sid = [];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Confidence interval settings
+//
+ci_alpha = [ 0.05 ];
+
+boot = {
+ interval = PCTILE;
+ rep_prop = 1.0;
+ n_rep = 0;
+ rng = "mt19937";
+ seed = "";
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Data smoothing methods
+//
+interp = {
+ field = BOTH;
+ vld_thresh = 1.0;
+
+ type = [
+ {
+ method = NEAREST;
+ width = 1;
+ }
+ ];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Neighborhood methods
+//
+nbrhd = {
+ field = BOTH;
+ shape = ${NEIGHBORHOOD_SHAPE};
+ width = [ ${NEIGHBORHOOD_WIDTH} ];
+ cov_thresh = [ >=0.5 ];
+ vld_thresh = 1.0;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Statistical output types
+//
+output_flag = {
+ fho = STAT;
+ ctc = STAT;
+ cts = STAT;
+ mctc = NONE;
+ mcts = NONE;
+ cnt = STAT;
+ sl1l2 = NONE;
+ sal1l2 = NONE;
+ vl1l2 = NONE;
+ val1l2 = NONE;
+ pct = NONE;
+ pstd = NONE;
+ pjc = NONE;
+ prc = NONE;
+ nbrctc = NONE;
+ nbrcts = NONE;
+ nbrcnt = STAT;
+}
+
+//
+// NetCDF matched pairs output file
+//
+nc_pairs_flag = {
+ latlon = FALSE;
+ raw = FALSE;
+ diff = FALSE;
+ climo = FALSE;
+ weight = FALSE;
+ nbrhd = FALSE;
+ apply_mask = FALSE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+rank_corr_flag = FALSE;
+tmp_dir = "/tmp";
+output_prefix = "${OUTPUT_PREFIX}";
+version = "V9.0.2";
+
+////////////////////////////////////////////////////////////////////////////////
diff --git a/ush/templates/parm/met/GridStatConfig_REFL b/ush/templates/parm/met/GridStatConfig_REFL
new file mode 100644
index 000000000..b3ebb13ec
--- /dev/null
+++ b/ush/templates/parm/met/GridStatConfig_REFL
@@ -0,0 +1,219 @@
+////////////////////////////////////////////////////////////////////////////////
+//
+// Grid-Stat configuration file.
+//
+// For additional information, see the MET_BASE/config/README file.
+//
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Output model name to be written
+//
+model = "${MODEL}";
+
+//
+// Output description to be written
+// May be set separately in each "obs.field" entry
+//
+desc = "NA";
+
+//
+// Output observation type to be written
+//
+obtype = "${OBTYPE}";
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification grid
+//
+regrid = {
+ to_grid = ${REGRID_TO_GRID};
+ vld_thresh = 0.5;
+ method = NEAREST;
+ width = 1;
+ shape = SQUARE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+cat_thresh = [];
+cnt_thresh = [ NA ];
+cnt_logic = UNION;
+wind_thresh = [ NA ];
+wind_logic = UNION;
+
+//
+// Forecast and observation fields to be verified
+//
+
+M_to_KFT(x) = x * 3.28084 * 0.001;
+KM_to_KFT(x) = x * 3280.84 * 0.001;
+
+fcst = {
+ field = [ ${FCST_FIELD} ];
+}
+
+obs = {
+ field = [ ${OBS_FIELD} ];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Climatology mean data
+//
+climo_mean = {
+
+ file_name = [];
+ field = [];
+
+ regrid = {
+ vld_thresh = 0.5;
+ method = NEAREST;
+ width = 1;
+ }
+
+ time_interp_method = DW_MEAN;
+ match_day = FALSE;
+ time_step = 21600;
+}
+
+climo_stdev = climo_mean;
+climo_stdev = {
+ file_name = [];
+}
+
+climo_cdf_bins = 1;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification masking regions
+//
+mask = {
+ grid = [];
+ poly = [ ${VERIF_MASK} ];
+ sid = [];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Confidence interval settings
+//
+ci_alpha = [ 0.05 ];
+
+boot = {
+ interval = PCTILE;
+ rep_prop = 1.0;
+ n_rep = 0;
+ rng = "mt19937";
+ seed = "";
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Data smoothing methods
+//
+interp = {
+ field = NONE;
+ vld_thresh = 1.0;
+ shape = SQUARE;
+
+ type = [
+ {
+ method = NEAREST;
+ width = 1;
+ }
+ ];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Neighborhood methods
+//
+nbrhd = {
+ field = BOTH;
+ shape = ${NEIGHBORHOOD_SHAPE};
+ width = [ ${NEIGHBORHOOD_WIDTH} ];
+ cov_thresh = [ >0. ];
+ vld_thresh = 1.0;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Fourier decomposition
+// May be set separately in each "obs.field" entry
+//
+fourier = {
+ wave_1d_beg = [];
+ wave_1d_end = [];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Gradient statistics
+// May be set separately in each "obs.field" entry
+//
+gradient = {
+ dx = [ 1 ];
+ dy = [ 1 ];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Statistical output types
+//
+output_flag = {
+ fho = NONE;
+ ctc = STAT;
+ cts = STAT;
+ mctc = NONE;
+ mcts = NONE;
+ cnt = NONE;
+ sl1l2 = NONE;
+ sal1l2 = NONE;
+ vl1l2 = NONE;
+ val1l2 = NONE;
+ vcnt = NONE;
+ pct = NONE;
+ pstd = NONE;
+ pjc = NONE;
+ prc = NONE;
+ eclv = NONE;
+ nbrctc = STAT;
+ nbrcts = STAT;
+ nbrcnt = STAT;
+ grad = NONE;
+}
+
+//
+// NetCDF matched pairs output file
+//
+nc_pairs_flag = {
+ latlon = FALSE;
+ raw = FALSE;
+ diff = FALSE;
+ climo = FALSE;
+ weight = FALSE;
+ nbrhd = FALSE;
+ fourier = FALSE;
+ gradient = FALSE;
+ apply_mask = FALSE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+grid_weight_flag = NONE;
+rank_corr_flag = FALSE;
+tmp_dir = "/tmp";
+output_prefix = "${OUTPUT_PREFIX}";
+version = "V9.0.2";
+
+////////////////////////////////////////////////////////////////////////////////
diff --git a/ush/templates/parm/met/PB2NCConfig_conus_sfc b/ush/templates/parm/met/PB2NCConfig_conus_sfc
new file mode 100644
index 000000000..55ce1da73
--- /dev/null
+++ b/ush/templates/parm/met/PB2NCConfig_conus_sfc
@@ -0,0 +1,161 @@
+////////////////////////////////////////////////////////////////////////////////
+//
+// PB2NC configuration file.
+//
+// For additional information, see the MET_BASE/config/README file.
+//
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// PrepBufr message type
+//
+message_type = ${PB2NC_MESSAGE_TYPE} ;
+
+//
+// Mapping of message type group name to comma-separated list of values
+// Derive PRMSL only for SURFACE message types
+//
+message_type_group_map = [
+ { key = "SURFACE"; val = "ADPSFC,SFCSHP,MSONET"; },
+ { key = "ANYAIR"; val = "AIRCAR,AIRCFT"; },
+ { key = "ANYSFC"; val = "ADPSFC,SFCSHP,ADPUPA,PROFLR,MSONET"; },
+ { key = "ONLYSF"; val = "ADPSFC,SFCSHP"; }
+];
+
+//
+// Mapping of input PrepBufr message types to output message types
+//
+message_type_map = [];
+
+//
+// PrepBufr station ID
+//
+station_id = ${PB2NC_STATION_ID} ;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observation time window
+//
+obs_window = {
+ beg = ${OBS_WINDOW_BEGIN};
+ end = ${OBS_WINDOW_END};
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observation retention regions
+//
+mask = {
+ grid = "${PB2NC_GRID}";
+ poly = "${PB2NC_POLY}";
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observing location elevation
+//
+elevation_range = {
+ beg = -1000;
+ end = 100000;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observation types
+//
+pb_report_type = [ 120, 220, 221, 122, 222, 223, 224, 131, 133, 233, 153, 156, 157, 180, 280, 181, 182, 281, 282, 183, 284, 187, 287 ];
+
+in_report_type = [];
+
+instrument_type = [];
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Vertical levels to retain
+//
+level_range = {
+ beg = 1;
+ end = 511;
+}
+
+level_category = [0, 1, 4, 5, 6];
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// BUFR variable names to retain or derive.
+// Use obs_bufr_map to rename variables in the output.
+// If empty, process all available variables.
+//
+obs_bufr_var = ${OBS_BUFR_VAR_LIST};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Mapping of input BUFR variable names to output variable names.
+// The default PREPBUFR map, obs_prepbufr_map, is appended to this map.
+//
+obs_bufr_map = [];
+
+//
+// Default mapping for PREPBUFR. Replace input BUFR variable names with GRIB
+// abbreviations in the output. This default map is appended to obs_bufr_map.
+// This should not typically be overridden. NOTE(review): "obs_prefbufr_map" below
+// is the key spelling expected by MET v9.x; renamed obs_prepbufr_map in later MET.
+obs_prefbufr_map = [
+ { key = "HOVI"; val = "VIS"; },
+ { key = "PMO"; val = "PRMSL"; },
+ { key = "POB"; val = "PRES"; },
+ { key = "QOB"; val = "SPFH"; },
+ { key = "TOB"; val = "TMP"; },
+ { key = "TDO"; val = "DPT"; },
+ { key = "ZOB"; val = "HGT"; },
+ { key = "UOB"; val = "UGRD"; },
+ { key = "VOB"; val = "VGRD"; },
+ { key = "D_DPT"; val = "DPT"; },
+ { key = "D_WDIR"; val = "WDIR"; },
+ { key = "D_WIND"; val = "WIND"; },
+ { key = "D_RH"; val = "RH"; },
+ { key = "D_MIXR"; val = "MIXR"; },
+ { key = "D_PBL"; val = "PBL"; },
+ { key = "D_CAPE"; val = "CAPE"; },
+ { key = "TOCC"; val = "TCDC"; }
+];
+
+////////////////////////////////////////////////////////////////////////////////
+
+quality_mark_thresh = 3;
+event_stack_flag = TOP;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Time periods for the summarization
+// obs_var (string array) is added and works like grib_code (int array)
+// when use_var_id is enabled and variable names are saved.
+//
+time_summary = {
+ flag = ${TIME_SUMMARY_FLAG};
+ raw_data = FALSE;
+ beg = ${TIME_SUMMARY_BEG};
+ end = ${TIME_SUMMARY_END};
+ step = 300;
+ width = 600;
+ grib_code = [];
+ obs_var = ${TIME_SUMMARY_VAR_NAMES};
+ type = ${TIME_SUMMARY_TYPES};
+ vld_freq = 0;
+ vld_thresh = 0.0;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+tmp_dir = "/tmp";
+version = "V9.0.2";
+
+////////////////////////////////////////////////////////////////////////////////
diff --git a/ush/templates/parm/met/PB2NCConfig_upper_air b/ush/templates/parm/met/PB2NCConfig_upper_air
new file mode 100644
index 000000000..cf29a5208
--- /dev/null
+++ b/ush/templates/parm/met/PB2NCConfig_upper_air
@@ -0,0 +1,157 @@
+////////////////////////////////////////////////////////////////////////////////
+//
+// PB2NC configuration file.
+//
+// For additional information, see the MET_BASE/config/README file.
+//
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// PrepBufr message type
+//
+message_type = ${PB2NC_MESSAGE_TYPE} ;
+
+//
+// Mapping of message type group name to comma-separated list of values
+// Derive PRMSL only for SURFACE message types
+//
+message_type_group_map = [
+ { key = "SURFACE"; val = "ADPSFC,SFCSHP,MSONET"; },
+ { key = "ANYAIR"; val = "AIRCAR,AIRCFT"; },
+ { key = "ANYSFC"; val = "ADPSFC,SFCSHP,ADPUPA,PROFLR,MSONET"; },
+ { key = "ONLYSF"; val = "ADPSFC,SFCSHP"; }
+];
+
+//
+// Mapping of input PrepBufr message types to output message types
+//
+message_type_map = [];
+
+//
+// PrepBufr station ID
+//
+station_id = ${PB2NC_STATION_ID} ;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observation time window
+//
+obs_window = {
+ beg = ${OBS_WINDOW_BEGIN};
+ end = ${OBS_WINDOW_END};
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observation retention regions
+//
+mask = {
+ grid = "${PB2NC_GRID}";
+ poly = "${PB2NC_POLY}";
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observing location elevation
+//
+elevation_range = {
+ beg = -1000;
+ end = 100000;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Observation types
+//
+pb_report_type = [ 120, 220, 221, 122, 222, 223, 224, 131, 133, 233, 153, 156, 157, 180, 280, 181, 182, 281, 282, 183, 284, 187, 287 ];
+
+in_report_type = [];
+
+instrument_type = [];
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Vertical levels to retain
+//
+level_range = {
+ beg = 1;
+ end = 511;
+}
+
+level_category = [0, 1, 4, 5, 6];
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// BUFR variable names to retain or derive.
+// Use obs_bufr_map to rename variables in the output.
+// If empty, process all available variables.
+//
+obs_bufr_var = ${OBS_BUFR_VAR_LIST};
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Mapping of input BUFR variable names to output variable names.
+// The default PREPBUFR map, obs_prepbufr_map, is appended to this map.
+//
+obs_bufr_map = [];
+
+//
+// Default mapping for PREPBUFR. Replace input BUFR variable names with GRIB
+// abbreviations in the output. This default map is appended to obs_bufr_map.
+// This should not typically be overridden. NOTE(review): "obs_prefbufr_map" below
+// is the key spelling expected by MET v9.x; renamed obs_prepbufr_map in later MET.
+obs_prefbufr_map = [
+ { key = "POB"; val = "PRES"; },
+ { key = "QOB"; val = "SPFH"; },
+ { key = "TOB"; val = "TMP"; },
+ { key = "TDO"; val = "DPT"; },
+ { key = "ZOB"; val = "HGT"; },
+ { key = "UOB"; val = "UGRD"; },
+ { key = "VOB"; val = "VGRD"; },
+ { key = "D_DPT"; val = "DPT"; },
+ { key = "D_WDIR"; val = "WDIR"; },
+ { key = "D_WIND"; val = "WIND"; },
+ { key = "D_RH"; val = "RH"; },
+ { key = "D_MIXR"; val = "MIXR"; },
+ { key = "D_PRMSL"; val = "PRMSL"; }
+];
+
+////////////////////////////////////////////////////////////////////////////////
+
+quality_mark_thresh = 3;
+event_stack_flag = TOP;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Time periods for the summarization
+// obs_var (string array) is added and works like grib_code (int array)
+// when use_var_id is enabled and variable names are saved.
+//
+time_summary = {
+ flag = ${TIME_SUMMARY_FLAG};
+ raw_data = FALSE;
+ beg = ${TIME_SUMMARY_BEG};
+ end = ${TIME_SUMMARY_END};
+ step = 300;
+ width = 600;
+ grib_code = [];
+ obs_var = ${TIME_SUMMARY_VAR_NAMES};
+ type = ${TIME_SUMMARY_TYPES};
+ vld_freq = 0;
+ vld_thresh = 0.0;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+tmp_dir = "/tmp";
+version = "V9.0.2";
+
+////////////////////////////////////////////////////////////////////////////////
diff --git a/ush/templates/parm/met/PointStatConfig_conus_sfc b/ush/templates/parm/met/PointStatConfig_conus_sfc
new file mode 100644
index 000000000..ae1350cf4
--- /dev/null
+++ b/ush/templates/parm/met/PointStatConfig_conus_sfc
@@ -0,0 +1,259 @@
+////////////////////////////////////////////////////////////////////////////////
+//
+// Point-Stat configuration file.
+//
+// For additional information, see the MET_BASE/config/README file.
+//
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Output model name to be written
+//
+model = "${MODEL}";
+
+//
+// Output description to be written
+// May be set separately in each "obs.field" entry
+//
+desc = "NA";
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification grid
+// May be set separately in each "field" entry
+//
+regrid = {
+ to_grid = ${REGRID_TO_GRID};
+ method = BILIN;
+ width = 2;
+ vld_thresh = 0.5;
+ shape = SQUARE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// May be set separately in each "field" entry
+//
+censor_thresh = [];
+censor_val = [];
+cat_thresh = [ NA ];
+cnt_thresh = [ NA ];
+cnt_logic = UNION;
+wind_thresh = [ NA ];
+wind_logic = UNION;
+eclv_points = 0.05;
+rank_corr_flag = FALSE;
+
+//
+// Forecast and observation fields to be verified
+//
+fcst = {
+ field = [ ${FCST_FIELD} ];
+ };
+
+obs = {
+ field = [ ${OBS_FIELD} ];
+ };
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Point observation filtering options
+// May be set separately in each "obs.field" entry
+//
+message_type = ${POINT_STAT_MESSAGE_TYPE};
+sid_exc = [];
+obs_quality = [];
+duplicate_flag = NONE;
+obs_summary = NONE;
+obs_perc_value = 50;
+
+//
+// Mapping of message type group name to comma-separated list of values.
+//
+message_type_group_map = [
+ { key = "SURFACE"; val = "ADPSFC,SFCSHP,MSONET"; },
+ { key = "ANYAIR"; val = "AIRCAR,AIRCFT"; },
+ { key = "ANYSFC"; val = "ADPSFC,SFCSHP,ADPUPA,PROFLR,MSONET"; },
+ { key = "ONLYSF"; val = "ADPSFC,SFCSHP"; },
+ { key = "LANDSF"; val = "ADPSFC,MSONET"; },
+ { key = "WATERSF"; val = "SFCSHP"; }
+];
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Climatology data
+//
+climo_mean = {
+
+ file_name = [];
+ field = [];
+
+ regrid = {
+ method = NEAREST;
+ width = 1;
+ vld_thresh = 0.5;
+ shape = SQUARE;
+ }
+
+ time_interp_method = NEAREST;
+ match_month = TRUE;
+ match_day = TRUE;
+ time_step = 21600;
+}
+
+climo_stdev = climo_mean;
+climo_stdev = {
+ file_name = [];
+}
+
+climo_cdf_bins = 1;
+write_cdf_bins = FALSE;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Land/Sea mask
+// For LANDSF message types, only use forecast grid points where land = TRUE.
+// For WATERSF message types, only use forecast grid points where land = FALSE.
+// land_mask.flag may be set separately in each "obs.field" entry.
+//
+land_mask = {
+ flag = FALSE;
+ file_name = [];
+ field = { name = "LAND"; level = "L0"; }
+ regrid = { method = NEAREST; width = 1; }
+ thresh = eq1;
+}
+
+//
+// Topography
+// For SURFACE message types, only use observations where the topo - station
+// elevation difference meets the use_obs_thresh threshold.
+// For the observations kept, when interpolating forecast data to the
+// observation location, only use forecast grid points where the topo - station
+// difference meets the interp_fcst_thresh threshold.
+// topo_mask.flag may be set separately in each "obs.field" entry.
+//
+topo_mask = {
+ flag = FALSE;
+ file_name = [];
+ field = { name = "TOPO"; level = "L0"; }
+ regrid = { method = BILIN; width = 2; }
+ use_obs_thresh = ge-100&&le100;
+ interp_fcst_thresh = ge-50&&le50;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Point observation time window
+// May be set separately in each "obs.field" entry
+//
+obs_window = {
+ beg = ${OBS_WINDOW_BEGIN};
+ end = ${OBS_WINDOW_END};
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification masking regions
+// May be set separately in each "obs.field" entry
+//
+mask = {
+ grid = ${POINT_STAT_GRID};
+// poly = [
+// "${FIXverif_global}/vx_mask_files/grid2obs/CONUS.nc",
+// "${FIXverif_global}/vx_mask_files/grid2obs/EAST.nc",
+// "${FIXverif_global}/vx_mask_files/grid2obs/WEST.nc"
+// ];
+ poly = ${POINT_STAT_POLY};
+ sid = ${POINT_STAT_STATION_ID};
+ llpnt = [];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Confidence interval settings
+// May be set separately in each "obs.field" entry
+//
+ci_alpha = [ 0.05 ];
+
+boot = {
+ interval = PCTILE;
+ rep_prop = 1.0;
+ n_rep = 0;
+ rng = "mt19937";
+ seed = "";
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Interpolation methods
+// May be set separately in each "obs.field" entry
+//
+interp = {
+ vld_thresh = 1.0;
+ shape = SQUARE;
+
+ type = [
+ {
+ method = BILIN;
+ width = 2;
+ }
+ ];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// HiRA verification method
+// May be set separately in each "obs.field" entry
+//
+hira = {
+ flag = FALSE;
+ width = [ 2, 3, 4, 5 ];
+ vld_thresh = 1.0;
+ cov_thresh = [ ==0.25 ];
+ shape = SQUARE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Statistical output types
+// May be set separately in each "obs.field" entry
+//
+output_flag = {
+ fho = NONE;
+ ctc = NONE;
+ cts = NONE;
+ mctc = NONE;
+ mcts = NONE;
+ cnt = STAT;
+ sl1l2 = STAT;
+ sal1l2 = NONE;
+ vl1l2 = STAT;
+ val1l2 = NONE;
+ vcnt = STAT;
+ pct = NONE;
+ pstd = NONE;
+ pjc = NONE;
+ prc = NONE;
+ eclv = NONE;
+ mpr = NONE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+tmp_dir = "/tmp";
+output_prefix = "${OUTPUT_PREFIX}";
+version = "V9.0.2";
+
+////////////////////////////////////////////////////////////////////////////////
diff --git a/ush/templates/parm/met/PointStatConfig_upper_air b/ush/templates/parm/met/PointStatConfig_upper_air
new file mode 100644
index 000000000..0dff52c03
--- /dev/null
+++ b/ush/templates/parm/met/PointStatConfig_upper_air
@@ -0,0 +1,262 @@
+////////////////////////////////////////////////////////////////////////////////
+//
+// Point-Stat configuration file.
+//
+// For additional information, see the MET_BASE/config/README file.
+//
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Output model name to be written
+//
+model = "${MODEL}";
+
+//
+// Output description to be written
+// May be set separately in each "obs.field" entry
+//
+desc = "NA";
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification grid
+// May be set separately in each "field" entry
+//
+regrid = {
+ to_grid = ${REGRID_TO_GRID};
+ method = BILIN;
+ width = 2;
+ vld_thresh = 0.5;
+ shape = SQUARE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// May be set separately in each "field" entry
+//
+censor_thresh = [];
+censor_val = [];
+cat_thresh = [ NA ];
+cnt_thresh = [ NA ];
+cnt_logic = UNION;
+wind_thresh = [ NA ];
+wind_logic = UNION;
+eclv_points = 0.05;
+rank_corr_flag = FALSE;
+
+//
+// Forecast and observation fields to be verified
+//
+fcst = {
+ field = [ ${FCST_FIELD} ];
+ };
+
+obs = {
+ field = [ ${OBS_FIELD} ];
+ };
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Point observation filtering options
+// May be set separately in each "obs.field" entry
+//
+message_type = ${POINT_STAT_MESSAGE_TYPE};
+sid_exc = [];
+obs_quality = [];
+duplicate_flag = NONE;
+obs_summary = NONE;
+obs_perc_value = 50;
+
+//
+// Mapping of message type group name to comma-separated list of values.
+//
+message_type_group_map = [
+ { key = "SURFACE"; val = "ADPSFC,SFCSHP,MSONET"; },
+ { key = "ANYAIR"; val = "AIRCAR,AIRCFT"; },
+ { key = "ANYSFC"; val = "ADPSFC,SFCSHP,ADPUPA,PROFLR,MSONET"; },
+ { key = "ONLYSF"; val = "ADPSFC,SFCSHP"; },
+ { key = "LANDSF"; val = "ADPSFC,MSONET"; },
+ { key = "WATERSF"; val = "SFCSHP"; }
+];
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Climatology data
+//
+climo_mean = {
+
+ file_name = [];
+ field = [];
+
+ regrid = {
+ method = NEAREST;
+ width = 1;
+ vld_thresh = 0.5;
+ shape = SQUARE;
+ }
+
+ time_interp_method = NEAREST;
+ match_month = TRUE;
+ match_day = TRUE;
+ time_step = 21600;
+}
+
+climo_stdev = climo_mean;
+climo_stdev = {
+ file_name = [];
+}
+
+climo_cdf_bins = 1;
+write_cdf_bins = FALSE;
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Land/Sea mask
+// For LANDSF message types, only use forecast grid points where land = TRUE.
+// For WATERSF message types, only use forecast grid points where land = FALSE.
+// land_mask.flag may be set separately in each "obs.field" entry.
+//
+land_mask = {
+ flag = FALSE;
+ file_name = [];
+ field = { name = "LAND"; level = "L0"; }
+ regrid = { method = NEAREST; width = 1; }
+ thresh = eq1;
+}
+
+//
+// Topography
+// For SURFACE message types, only use observations where the topo - station
+// elevation difference meets the use_obs_thresh threshold.
+// For the observations kept, when interpolating forecast data to the
+// observation location, only use forecast grid points where the topo - station
+// difference meets the interp_fcst_thresh threshold.
+// topo_mask.flag may be set separately in each "obs.field" entry.
+//
+topo_mask = {
+ flag = FALSE;
+ file_name = [];
+ field = { name = "TOPO"; level = "L0"; }
+ regrid = { method = BILIN; width = 2; }
+ use_obs_thresh = ge-100&&le100;
+ interp_fcst_thresh = ge-50&&le50;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Point observation time window
+// May be set separately in each "obs.field" entry
+//
+obs_window = {
+ beg = ${OBS_WINDOW_BEGIN};
+ end = ${OBS_WINDOW_END};
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Verification masking regions
+// May be set separately in each "obs.field" entry
+//
+mask = {
+ grid = ${POINT_STAT_GRID};
+// poly = [
+// "${FIXverif_global}/vx_mask_files/grid2obs/NA.nc",
+// "${FIXverif_global}/vx_mask_files/grid2obs/SA.nc",
+// "${FIXverif_global}/vx_mask_files/grid2obs/NH.nc",
+// "${FIXverif_global}/vx_mask_files/grid2obs/SH.nc",
+// "${FIXverif_global}/vx_mask_files/grid2obs/TRO.nc",
+// "${FIXverif_global}/vx_mask_files/grid2obs/CONUS.nc"
+// ];
+ poly = ${POINT_STAT_POLY};
+ sid = ${POINT_STAT_STATION_ID};
+ llpnt = [];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Confidence interval settings
+// May be set separately in each "obs.field" entry
+//
+ci_alpha = [ 0.05 ];
+
+boot = {
+ interval = PCTILE;
+ rep_prop = 1.0;
+ n_rep = 0;
+ rng = "mt19937";
+ seed = "";
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Interpolation methods
+// May be set separately in each "obs.field" entry
+//
+interp = {
+ vld_thresh = 1.0;
+ shape = SQUARE;
+
+ type = [
+ {
+ method = BILIN;
+ width = 2;
+ }
+ ];
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// HiRA verification method
+// May be set separately in each "obs.field" entry
+//
+hira = {
+ flag = FALSE;
+ width = [ 2, 3, 4, 5 ];
+ vld_thresh = 1.0;
+ cov_thresh = [ ==0.25 ];
+ shape = SQUARE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+//
+// Statistical output types
+// May be set separately in each "obs.field" entry
+//
+output_flag = {
+ fho = NONE;
+ ctc = NONE;
+ cts = NONE;
+ mctc = NONE;
+ mcts = NONE;
+ cnt = STAT;
+ sl1l2 = STAT;
+ sal1l2 = NONE;
+ vl1l2 = STAT;
+ val1l2 = NONE;
+ vcnt = STAT;
+ pct = NONE;
+ pstd = NONE;
+ pjc = NONE;
+ prc = NONE;
+ eclv = NONE;
+ mpr = NONE;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+tmp_dir = "/tmp";
+output_prefix = "${OUTPUT_PREFIX}";
+version = "V9.0.2";
+
+////////////////////////////////////////////////////////////////////////////////
diff --git a/ush/templates/parm/metplus/APCP_01h.conf b/ush/templates/parm/metplus/APCP_01h.conf
new file mode 100644
index 000000000..01b1edc58
--- /dev/null
+++ b/ush/templates/parm/metplus/APCP_01h.conf
@@ -0,0 +1,91 @@
+# Grid to Grid Precipitation
+
+[dir]
+# Input and output data directories
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+FCST_GRID_STAT_INPUT_DIR = {INPUT_BASE}
+
+OUTPUT_BASE = {ENV[EXPTDIR]}
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+OBS_GRID_STAT_INPUT_DIR = {ENV[OBS_DIR]}
+
+[filename_templates]
+# format of filenames
+FCST_GRID_STAT_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+
+# ANLYS
+OBS_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2
+
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/grid_stat
+
+# Location of MET poly files
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+
+[config]
+# Model to verify
+MODEL = {ENV[MODEL]}
+FCST_NATIVE_DATA_TYPE = GRIB
+
+# Set obtype to vx
+OBTYPE = CCPA
+OBS_NATIVE_DATA_TYPE = GRIB
+
+# List of applications to run
+PROCESS_LIST = GridStat
+
+# location of grid_stat MET config file
+GRID_STAT_CONFIG_FILE = {CONFIG_DIR}/GridStatConfig_APCP
+GRID_STAT_OUTPUT_PREFIX = {MODEL}_{CURRENT_FCST_NAME}_{ENV[acc]}_{OBTYPE}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+#INIT_TIME_FMT = %Y%m%d%H  # duplicate of INIT_TIME_FMT above; disabled
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr(1,24,1)
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Options are times, processes
+# times = run all items in the PROCESS_LIST for a single initialization
+# time, then repeat until all times have been evaluated.
+# processes = run each item in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST.
+LOOP_ORDER = times
+
+# run pcp_combine on forecast/obs data?
+FCST_PCP_COMBINE_RUN = False
+OBS_PCP_COMBINE_RUN = False
+
+# Set grid to verify on
+GRID_STAT_REGRID_TO_GRID = FCST
+
+# list of variables to compare
+# thresholds in mm, equal to .01",.02",.05",.10",.25",.50",.75",1.0"
+FCST_VAR1_NAME = APCP
+FCST_VAR1_LEVELS = A01
+
+BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54
+
+OBS_VAR1_NAME = APCP
+OBS_VAR1_LEVELS = A01
+
+# Neighborhood shape and widths
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
+
+# Forecast data description variables
+FCST_IS_PROB = False
+
diff --git a/ush/templates/parm/metplus/APCP_03h.conf b/ush/templates/parm/metplus/APCP_03h.conf
new file mode 100644
index 000000000..21a7d059a
--- /dev/null
+++ b/ush/templates/parm/metplus/APCP_03h.conf
@@ -0,0 +1,121 @@
+# Grid to Grid Precipitation
+
+[dir]
+# Input and output data directories
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+OUTPUT_BASE = {ENV[EXPTDIR]}
+
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}
+FCST_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+
+OBS_PCP_COMBINE_INPUT_DIR = {ENV[OBS_DIR]}
+OBS_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+OBS_GRID_STAT_INPUT_DIR = {OBS_PCP_COMBINE_OUTPUT_DIR}
+
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+[filename_templates]
+# format of filenames
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}_a{level?fmt=%HH}h
+FCST_GRID_STAT_INPUT_TEMPLATE = {FCST_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+# ANLYS
+OBS_PCP_COMBINE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2
+OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.hrap.conus.gb2_a{level?fmt=%HH}h
+OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/grid_stat
+
+# Location of MET poly files
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+
+[config]
+# Model to verify
+MODEL = {ENV[MODEL]}
+FCST_NATIVE_DATA_TYPE = GRIB
+
+# Set obtype to vx
+OBTYPE = CCPA
+OBS_NATIVE_DATA_TYPE = GRIB
+
+# List of applications to run
+PROCESS_LIST = PcpCombine, GridStat
+
+# Run pcp_combine on forecast/obs data?
+FCST_PCP_COMBINE_RUN = True
+OBS_PCP_COMBINE_RUN = True
+
+# Mode of pcp_combine to use (SUM, ADD, SUBTRACT)
+FCST_PCP_COMBINE_METHOD = ADD
+OBS_PCP_COMBINE_METHOD = ADD
+
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+
+# Accumulation interval available in forecast data
+FCST_PCP_COMBINE_INPUT_ACCUMS = 01
+FCST_PCP_COMBINE_OUTPUT_ACCUM = 03
+
+# Accumulation interval available in obs data
+OBS_PCP_COMBINE_INPUT_ACCUMS = 01
+OBS_PCP_COMBINE_OUTPUT_ACCUM = 03
+
+# If 'bucket' output already exists, skip the PcpCombine step for the data
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+
+# Forecast data description variables
+FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
+FCST_IS_PROB = false
+
+# location of grid_stat MET config file
+GRID_STAT_CONFIG_FILE = {CONFIG_DIR}/GridStatConfig_APCP
+GRID_STAT_OUTPUT_PREFIX = {MODEL}_{CURRENT_FCST_NAME}_{ENV[acc]}_{OBTYPE}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+#INIT_TIME_FMT = %Y%m%d%H  # duplicate of INIT_TIME_FMT above; disabled
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr(1,24,1)
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Options are times, processes
+# times = run all items in the PROCESS_LIST for a single initialization
+# time, then repeat until all times have been evaluated.
+# processes = run each item in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST.
+LOOP_ORDER = times
+
+# Set grid to verify on
+GRID_STAT_REGRID_TO_GRID = FCST
+
+# list of variables to compare
+# thresholds in mm, equal to .01",.02",.05",.10",.25",.50",.75",1.0"
+FCST_VAR1_NAME = APCP
+FCST_VAR1_LEVELS = A03
+
+BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350
+
+OBS_VAR1_NAME = APCP
+OBS_VAR1_LEVELS = A03
+
+# Neighborhood shape and widths
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
+
+# Forecast data description variables
+#FCST_IS_PROB = False  # duplicate; FCST_IS_PROB already set to false above
+
diff --git a/ush/templates/parm/metplus/APCP_03h.conf_3hCCPA b/ush/templates/parm/metplus/APCP_03h.conf_3hCCPA
new file mode 100644
index 000000000..3235ff631
--- /dev/null
+++ b/ush/templates/parm/metplus/APCP_03h.conf_3hCCPA
@@ -0,0 +1,108 @@
+# Grid to Grid Precipitation
+
+[dir]
+# Input and output data directories
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+OUTPUT_BASE = {ENV[EXPTDIR]}
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}
+FCST_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+
+OBS_GRID_STAT_INPUT_DIR = {ENV[OBS_DIR]}
+
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+[filename_templates]
+# format of filenames
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}_a{level?fmt=%HH}h
+FCST_GRID_STAT_INPUT_TEMPLATE = {FCST_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+# ANLYS
+OBS_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.03h.hrap.conus.gb2
+
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/grid_stat
+
+# Location of MET poly files
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+
+[config]
+# Model to verify
+MODEL = {ENV[MODEL]}
+FCST_NATIVE_DATA_TYPE = GRIB
+
+# Set obtype to vx
+OBTYPE = CCPA
+OBS_NATIVE_DATA_TYPE = GRIB
+
+# List of applications to run
+PROCESS_LIST = PcpCombine, GridStat
+
+# Run pcp_combine on forecast/obs data?
+FCST_PCP_COMBINE_RUN = True
+OBS_PCP_COMBINE_RUN = False
+
+# Mode of pcp_combine to use (SUM, ADD, SUBTRACT)
+FCST_PCP_COMBINE_METHOD = ADD
+
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+
+# Accumulation interval available in forecast data
+FCST_PCP_COMBINE_INPUT_ACCUMS = 01
+FCST_PCP_COMBINE_OUTPUT_ACCUM = 03
+
+# If 'bucket' output already exists, skip the PcpCombine step for the data
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+
+# Forecast data description variables
+FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
+FCST_IS_PROB = false
+
+# location of grid_stat MET config file
+GRID_STAT_CONFIG_FILE = {CONFIG_DIR}/GridStatConfig_APCP
+GRID_STAT_OUTPUT_PREFIX = {MODEL}_{CURRENT_FCST_NAME}_{ENV[acc]}_{OBTYPE}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+#INIT_TIME_FMT = %Y%m%d%H  # duplicate of INIT_TIME_FMT above; disabled
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr({ENV[ACCUM]},{ENV[LAST_HR]},{ENV[ACCUM]})
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Options are times, processes
+# times = run all items in the PROCESS_LIST for a single initialization
+# time, then repeat until all times have been evaluated.
+# processes = run each item in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST.
+LOOP_ORDER = times
+
+# Set grid to verify on
+GRID_STAT_REGRID_TO_GRID = FCST
+
+# list of variables to compare
+# thresholds in mm, equal to .01",.02",.05",.10",.25",.50",.75",1.0"
+FCST_VAR1_NAME = APCP
+FCST_VAR1_LEVELS = A03
+
+BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350
+
+OBS_VAR1_NAME = APCP
+OBS_VAR1_LEVELS = A03
+
+# Neighborhood shape and widths
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
+
diff --git a/ush/templates/parm/metplus/APCP_06h.conf b/ush/templates/parm/metplus/APCP_06h.conf
new file mode 100644
index 000000000..20ec8ec31
--- /dev/null
+++ b/ush/templates/parm/metplus/APCP_06h.conf
@@ -0,0 +1,121 @@
+# Grid to Grid Precipitation
+
+[dir]
+# Input and output data directories
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+OUTPUT_BASE = {ENV[EXPTDIR]}
+
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}
+FCST_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+
+OBS_PCP_COMBINE_INPUT_DIR = {ENV[OBS_DIR]}
+OBS_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+OBS_GRID_STAT_INPUT_DIR = {OBS_PCP_COMBINE_OUTPUT_DIR}
+
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+[filename_templates]
+# format of filenames
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}_a{level?fmt=%HH}h
+FCST_GRID_STAT_INPUT_TEMPLATE = {FCST_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+# ANLYS
+OBS_PCP_COMBINE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2
+OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.hrap.conus.gb2_a{level?fmt=%HH}h
+OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/grid_stat
+
+# Location of MET poly files
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+
+[config]
+# Model to verify
+MODEL = {ENV[MODEL]}
+FCST_NATIVE_DATA_TYPE = GRIB
+
+# Set obtype to vx
+OBTYPE = CCPA
+OBS_NATIVE_DATA_TYPE = GRIB
+
+# List of applications to run
+PROCESS_LIST = PcpCombine, GridStat
+
+# Run pcp_combine on forecast/obs data?
+FCST_PCP_COMBINE_RUN = True
+OBS_PCP_COMBINE_RUN = True
+
+# Mode of pcp_combine to use (SUM, ADD, SUBTRACT)
+FCST_PCP_COMBINE_METHOD = ADD
+OBS_PCP_COMBINE_METHOD = ADD
+
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+
+# Accumulation interval available in forecast data
+FCST_PCP_COMBINE_INPUT_ACCUMS = 01
+FCST_PCP_COMBINE_OUTPUT_ACCUM = 06
+
+# Accumulation interval available in obs data
+OBS_PCP_COMBINE_INPUT_ACCUMS = 01
+OBS_PCP_COMBINE_OUTPUT_ACCUM = 06
+
+# If 'bucket' output already exists, skip the PcpCombine step for the data
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+
+# Forecast data description variables
+FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
+FCST_IS_PROB = false
+
+# location of grid_stat MET config file
+GRID_STAT_CONFIG_FILE = {CONFIG_DIR}/GridStatConfig_APCP
+GRID_STAT_OUTPUT_PREFIX = {MODEL}_{CURRENT_FCST_NAME}_{ENV[acc]}_{OBTYPE}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+#INIT_TIME_FMT = %Y%m%d%H  (duplicate; already set above)
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr(1,24,1)
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Options are times, processes
+# times = run all items in the PROCESS_LIST for a single initialization
+# time, then repeat until all times have been evaluated.
+# processes = run each item in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST.
+LOOP_ORDER = times
+
+# Set grid to verify on
+GRID_STAT_REGRID_TO_GRID = FCST
+
+# list of variables to compare
+# thresholds in mm; nonzero values equal to .01",.02",.05",.10",.15",.25",.35",.50"
+FCST_VAR1_NAME = APCP
+FCST_VAR1_LEVELS = A06
+
+BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350,ge8.890,ge12.700
+
+OBS_VAR1_NAME = APCP
+OBS_VAR1_LEVELS = A06
+
+# Neighborhood shape and widths
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
+
+# Forecast data description variables
+FCST_IS_PROB = False
+
diff --git a/ush/templates/parm/metplus/APCP_06h.conf_6hCCPA b/ush/templates/parm/metplus/APCP_06h.conf_6hCCPA
new file mode 100644
index 000000000..d3d1a9f20
--- /dev/null
+++ b/ush/templates/parm/metplus/APCP_06h.conf_6hCCPA
@@ -0,0 +1,108 @@
+# Grid to Grid Precipitation
+
+[dir]
+# Input and output data directories
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+OUTPUT_BASE = {ENV[EXPTDIR]}
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}
+FCST_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+
+OBS_GRID_STAT_INPUT_DIR = {ENV[OBS_DIR]}
+
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+[filename_templates]
+# format of filenames
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}_a{level?fmt=%HH}h
+FCST_GRID_STAT_INPUT_TEMPLATE = {FCST_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+# ANLYS
+OBS_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.06h.hrap.conus.gb2
+
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/grid_stat
+
+# Location of MET poly files
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+
+[config]
+# Model to verify
+MODEL = {ENV[MODEL]}
+FCST_NATIVE_DATA_TYPE = GRIB
+
+# Set obtype to vx
+OBTYPE = CCPA
+OBS_NATIVE_DATA_TYPE = GRIB
+
+# List of applications to run
+PROCESS_LIST = PcpCombine, GridStat
+
+# Run pcp_combine on forecast/obs data?
+FCST_PCP_COMBINE_RUN = True
+OBS_PCP_COMBINE_RUN = False
+
+# Mode of pcp_combine to use (SUM, ADD, SUBTRACT)
+FCST_PCP_COMBINE_METHOD = ADD
+
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+
+# Accumulation interval available in forecast data
+FCST_PCP_COMBINE_INPUT_ACCUMS = 01
+FCST_PCP_COMBINE_OUTPUT_ACCUM = 06
+
+# If 'bucket' output already exists, skip the PcpCombine step for the data
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+
+# Forecast data description variables
+FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
+FCST_IS_PROB = false
+
+# location of grid_stat MET config file
+GRID_STAT_CONFIG_FILE = {CONFIG_DIR}/GridStatConfig_APCP
+GRID_STAT_OUTPUT_PREFIX = {MODEL}_{CURRENT_FCST_NAME}_{ENV[acc]}_{OBTYPE}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+#INIT_TIME_FMT = %Y%m%d%H  (duplicate; already set above)
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr({ENV[ACCUM]},{ENV[LAST_HR]},{ENV[ACCUM]})
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Options are times, processes
+# times = run all items in the PROCESS_LIST for a single initialization
+# time, then repeat until all times have been evaluated.
+# processes = run each item in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST.
+LOOP_ORDER = times
+
+# Set grid to verify on
+GRID_STAT_REGRID_TO_GRID = FCST
+
+# list of variables to compare
+# thresholds in mm; nonzero values equal to .01",.02",.05",.10",.15",.25",.35",.50"
+FCST_VAR1_NAME = APCP
+FCST_VAR1_LEVELS = A06
+
+BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350,ge8.890,ge12.700
+
+OBS_VAR1_NAME = APCP
+OBS_VAR1_LEVELS = A06
+
+# Neighborhood shape and widths
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
+
diff --git a/ush/templates/parm/metplus/APCP_24h.conf b/ush/templates/parm/metplus/APCP_24h.conf
new file mode 100644
index 000000000..046eca2b3
--- /dev/null
+++ b/ush/templates/parm/metplus/APCP_24h.conf
@@ -0,0 +1,121 @@
+# Grid to Grid Precipitation
+
+[dir]
+# Input and output data directories
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+OUTPUT_BASE = {ENV[EXPTDIR]}
+
+FCST_PCP_COMBINE_INPUT_DIR = {INPUT_BASE}
+FCST_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+FCST_GRID_STAT_INPUT_DIR = {FCST_PCP_COMBINE_OUTPUT_DIR}
+
+OBS_PCP_COMBINE_INPUT_DIR = {ENV[OBS_DIR]}
+OBS_PCP_COMBINE_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pcp_combine
+OBS_GRID_STAT_INPUT_DIR = {OBS_PCP_COMBINE_OUTPUT_DIR}
+
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+[filename_templates]
+# format of filenames
+FCST_PCP_COMBINE_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+FCST_PCP_COMBINE_OUTPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}_a{level?fmt=%HH}h
+FCST_GRID_STAT_INPUT_TEMPLATE = {FCST_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+# ANLYS
+OBS_PCP_COMBINE_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.01h.hrap.conus.gb2
+OBS_PCP_COMBINE_OUTPUT_TEMPLATE = {valid?fmt=%Y%m%d}/ccpa.t{valid?fmt=%H}z.hrap.conus.gb2_a{level?fmt=%HH}h
+OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_PCP_COMBINE_OUTPUT_TEMPLATE}
+
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/grid_stat
+
+# Location of MET poly files
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+
+[config]
+# Model to verify
+MODEL = {ENV[MODEL]}
+FCST_NATIVE_DATA_TYPE = GRIB
+
+# Set obtype to vx
+OBTYPE = CCPA
+OBS_NATIVE_DATA_TYPE = GRIB
+
+# List of applications to run
+PROCESS_LIST = PcpCombine, GridStat
+
+# Run pcp_combine on forecast/obs data?
+FCST_PCP_COMBINE_RUN = True
+OBS_PCP_COMBINE_RUN = True
+
+# Mode of pcp_combine to use (SUM, ADD, SUBTRACT)
+FCST_PCP_COMBINE_METHOD = ADD
+OBS_PCP_COMBINE_METHOD = ADD
+
+FCST_PCP_COMBINE_CONSTANT_INIT = True
+
+# Accumulation interval available in forecast data
+FCST_PCP_COMBINE_INPUT_ACCUMS = 01
+FCST_PCP_COMBINE_OUTPUT_ACCUM = 24
+
+# Accumulation interval available in obs data
+OBS_PCP_COMBINE_INPUT_ACCUMS = 01
+OBS_PCP_COMBINE_OUTPUT_ACCUM = 24
+
+# If 'bucket' output already exists, skip the PcpCombine step for the data
+PCP_COMBINE_SKIP_IF_OUTPUT_EXISTS = True
+
+# Forecast data description variables
+FCST_PCP_COMBINE_INPUT_DATATYPE = GRIB
+FCST_IS_PROB = false
+
+# location of grid_stat MET config file
+GRID_STAT_CONFIG_FILE = {CONFIG_DIR}/GridStatConfig_APCP
+GRID_STAT_OUTPUT_PREFIX = {MODEL}_{CURRENT_FCST_NAME}_{ENV[acc]}_{OBTYPE}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+#INIT_TIME_FMT = %Y%m%d%H  (duplicate; already set above)
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr(1,24,1)
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Options are times, processes
+# times = run all items in the PROCESS_LIST for a single initialization
+# time, then repeat until all times have been evaluated.
+# processes = run each item in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST.
+LOOP_ORDER = times
+
+# Set grid to verify on
+GRID_STAT_REGRID_TO_GRID = FCST
+
+# list of variables to compare
+# thresholds in mm; nonzero values equal to .01",.02",.05",.10",.15",.25",.35",.50",1.0"
+FCST_VAR1_NAME = APCP
+FCST_VAR1_LEVELS = A24
+
+BOTH_VAR1_THRESH = gt0.0,ge0.254,ge0.508,ge1.27,ge2.54,ge3.810,ge6.350,ge8.890,ge12.700,ge25.400
+
+OBS_VAR1_NAME = APCP
+OBS_VAR1_LEVELS = A24
+
+# Neighborhood shape and widths
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_WIDTH = 3,5,7
+
+# Forecast data description variables
+FCST_IS_PROB = False
+
diff --git a/ush/templates/parm/metplus/PointStat_conus_sfc.conf b/ush/templates/parm/metplus/PointStat_conus_sfc.conf
new file mode 100644
index 000000000..86f903b50
--- /dev/null
+++ b/ush/templates/parm/metplus/PointStat_conus_sfc.conf
@@ -0,0 +1,172 @@
+[dir]
+# Input and output data directories
+PB2NC_INPUT_DIR = {ENV[OBS_DIR]}
+PB2NC_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pb2nc
+
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+FCST_POINT_STAT_INPUT_DIR = {INPUT_BASE}
+
+OBS_POINT_STAT_INPUT_DIR = {PB2NC_OUTPUT_DIR}
+
+OUTPUT_BASE = {ENV[EXPTDIR]}
+POINT_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+[filename_templates]
+# Template to look for prepbufr input to PB2NC relative to PB2NC_INPUT_DIR
+PB2NC_INPUT_TEMPLATE = prepbufr.ndas.{valid?fmt=%Y%m%d%H}
+# Template to use to write output from PB2NC
+PB2NC_OUTPUT_TEMPLATE = prepbufr.ndas.{valid?fmt=%Y%m%d%H}.nc
+
+# Template to look for forecast/observation input to PointStat relative to FCST_POINT_STAT_INPUT_DIR
+FCST_POINT_STAT_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+OBS_POINT_STAT_INPUT_TEMPLATE = prepbufr.ndas.{valid?fmt=%Y%m%d%H}.nc
+
+POINT_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/point_stat
+
+[config]
+
+# List of applications to run
+PROCESS_LIST = PB2NC, PointStat
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+INIT_TIME_FMT = %Y%m%d%H
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr(1,24,1)
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Order of loops to process data - Options are times, processes
+LOOP_ORDER = times
+
+# Verbosity of MET output - overrides LOG_VERBOSITY for PointStat only
+LOG_POINT_STAT_VERBOSITY = 2
+
+# For both PB2NC and point_stat
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+
+PB2NC_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+PB2NC_WINDOW_END = {OBS_WINDOW_END}
+
+# Location of MET config file to pass to PB2NC
+PB2NC_CONFIG_FILE = {CONFIG_DIR}/PB2NCConfig_conus_sfc
+
+# If set to True, skip run if the output file determined by the output directory and
+# filename template already exists
+PB2NC_SKIP_IF_OUTPUT_EXISTS = True
+
+# Values to pass to pb2nc config file using environment variables of the same name.
+PB2NC_GRID =
+PB2NC_POLY =
+PB2NC_STATION_ID =
+PB2NC_MESSAGE_TYPE =
+
+# Leave empty to process all
+# PB2NC_OBS_BUFR_VAR_LIST = POB, QOB, TOB, ZOB, UOB, VOB, PMO, TOCC, TDO, HOVI, D_DPT, D_WDIR, D_WIND, D_RH, D_MIXR, D_PRMSL
+PB2NC_OBS_BUFR_VAR_LIST = PMO, ZOB, TOB, TDO, QOB, UOB, VOB, PWO, TOCC, D_RH, HOVI, CEILING, D_PBL, D_CAPE, MXGS
+
+# For defining the time periods for summarization
+# False for no time summary, True otherwise
+# The rest of the PB2NC_TIME_SUMMARY variables are ignored if set to False
+PB2NC_TIME_SUMMARY_FLAG = False
+PB2NC_TIME_SUMMARY_BEG = 000000
+PB2NC_TIME_SUMMARY_END = 235959
+PB2NC_TIME_SUMMARY_VAR_NAMES = PMO,TOB,TDO,UOB,VOB,PWO,TOCC
+PB2NC_TIME_SUMMARY_TYPES = min, max, range, mean, stdev, median, p80
+
+# Location of MET config file to pass to PointStat
+# References PARM_BASE which is the location of the parm directory corresponding
+# to the ush directory of the master_metplus.py script that is called
+# or the value of the environment variable METPLUS_PARM_BASE if set
+POINT_STAT_CONFIG_FILE = {CONFIG_DIR}/PointStatConfig_conus_sfc
+
+# Model/fcst and obs name, e.g. GFS, NAM, GDAS, etc.
+MODEL = {ENV[MODEL]}
+OBTYPE = NDAS
+
+# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
+POINT_STAT_REGRID_TO_GRID = NONE
+
+POINT_STAT_OUTPUT_PREFIX = {MODEL}_{OBTYPE}_ADPSFC
+
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+POINT_STAT_GRID =
+
+# List of full path to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line), a long list will result in an
+# environment variable that is too long, resulting in an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the MET point_stat configuration file.
+POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+POINT_STAT_STATION_ID =
+
+# Message types, if all message types are to be returned, leave this empty,
+# otherwise indicate the message types of interest.
+POINT_STAT_MESSAGE_TYPE = ADPSFC
+
+# Variables and levels as specified in the field dictionary of the MET
+# point_stat configuration file. Specify as FCST_VARn_NAME, FCST_VARn_LEVELS,
+# (optional) FCST_VARn_OPTION
+BOTH_VAR1_NAME = TMP
+BOTH_VAR1_LEVELS = Z2
+
+BOTH_VAR2_NAME = DPT
+BOTH_VAR2_LEVELS = Z2
+
+BOTH_VAR3_NAME = UGRD
+BOTH_VAR3_LEVELS = Z10
+BOTH_VAR3_THRESH = >=2.572 ;; m/s or 5kts
+
+BOTH_VAR4_NAME = VGRD
+BOTH_VAR4_LEVELS = Z10
+BOTH_VAR4_THRESH = >=2.572 ;; m/s or 5kts
+
+BOTH_VAR5_NAME = PRMSL
+BOTH_VAR5_LEVELS = Z0
+
+BOTH_VAR6_NAME = TCDC
+BOTH_VAR6_LEVELS = L0
+BOTH_VAR6_OPTIONS = GRIB_lvl_typ = 200;
+
+BOTH_VAR7_NAME = VIS
+BOTH_VAR7_LEVELS = L0
+BOTH_VAR7_THRESH = <805, <1609, <4828, <8045, >=8045, <16090
+
+FCST_VAR8_NAME = HGT
+FCST_VAR8_LEVELS = L0
+FCST_VAR8_OPTIONS = GRIB_lvl_typ = 215; desc = "CEILING";
+FCST_VAR8_THRESH = <152, <305, <914, <1520, <3040, >=914
+OBS_VAR8_NAME = CEILING
+OBS_VAR8_LEVELS = L0
+OBS_VAR8_OPTIONS = GRIB_lvl_typ = 215;
+OBS_VAR8_THRESH = <152, <305, <914, <1520, <3040, >=914
+
+FCST_VAR9_NAME = CAPE
+FCST_VAR9_LEVELS = L0
+FCST_VAR9_OPTIONS = cnt_thresh = [ >0 ];
+FCST_VAR9_THRESH = >500, >1000, >1500, >2000, >3000, >4000
+OBS_VAR9_NAME = CAPE
+OBS_VAR9_LEVELS = L0-100000
+OBS_VAR9_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION;
+OBS_VAR9_THRESH = >500, >1000, >1500, >2000, >3000, >4000
+
+FCST_VAR10_NAME = HGT
+FCST_VAR10_LEVELS = L0
+FCST_VAR10_OPTIONS = GRIB_lvl_typ = 220;
+OBS_VAR10_NAME = PBL
+OBS_VAR10_LEVELS = L0
+OBS_VAR10_OPTIONS = desc = "RI";
+
+BOTH_VAR11_NAME = GUST
+BOTH_VAR11_LEVELS = Z0
diff --git a/ush/templates/parm/metplus/PointStat_upper_air.conf b/ush/templates/parm/metplus/PointStat_upper_air.conf
new file mode 100644
index 000000000..fe86b17a9
--- /dev/null
+++ b/ush/templates/parm/metplus/PointStat_upper_air.conf
@@ -0,0 +1,157 @@
+[dir]
+# Input and output data directories
+PB2NC_INPUT_DIR = {ENV[OBS_DIR]}
+PB2NC_OUTPUT_DIR = {ENV[EXPTDIR]}/{ENV[CDATE]}/metprd/pb2nc
+
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+FCST_POINT_STAT_INPUT_DIR = {INPUT_BASE}
+
+OBS_POINT_STAT_INPUT_DIR = {PB2NC_OUTPUT_DIR}
+
+OUTPUT_BASE = {ENV[EXPTDIR]}
+POINT_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+[filename_templates]
+# Template to look for prepbufr input to PB2NC relative to PB2NC_INPUT_DIR
+PB2NC_INPUT_TEMPLATE = prepbufr.ndas.{valid?fmt=%Y%m%d%H}
+# Template to use to write output from PB2NC
+PB2NC_OUTPUT_TEMPLATE = prepbufr.ndas.{valid?fmt=%Y%m%d%H}.nc
+
+# Template to look for forecast/observation input to PointStat relative to FCST_POINT_STAT_INPUT_DIR
+FCST_POINT_STAT_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+OBS_POINT_STAT_INPUT_TEMPLATE = prepbufr.ndas.{valid?fmt=%Y%m%d%H}.nc
+
+POINT_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/point_stat
+[config]
+
+# List of applications to run
+PROCESS_LIST = PB2NC, PointStat
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+INIT_TIME_FMT = %Y%m%d%H
+
+# list of forecast leads to process.
+LEAD_SEQ = begin_end_incr(0,{ENV[fhr_last]},6)
+#LEAD_SEQ = {ENV[fhr_list]}
+
+# Order of loops to process data - Options are times, processes
+LOOP_ORDER = times
+
+# Verbosity of MET output - overrides LOG_VERBOSITY for PointStat only
+LOG_POINT_STAT_VERBOSITY = 2
+
+# For both PB2NC and point_stat
+OBS_WINDOW_BEGIN = -1799
+OBS_WINDOW_END = 1800
+
+PB2NC_WINDOW_BEGIN = {OBS_WINDOW_BEGIN}
+PB2NC_WINDOW_END = {OBS_WINDOW_END}
+
+# Location of MET config file to pass to PB2NC
+PB2NC_CONFIG_FILE = {CONFIG_DIR}/PB2NCConfig_upper_air
+
+# If set to True, skip run if the output file determined by the output directory and
+# filename template already exists
+PB2NC_SKIP_IF_OUTPUT_EXISTS = True
+
+# Values to pass to pb2nc config file using environment variables of the same name.
+PB2NC_GRID =
+PB2NC_POLY =
+PB2NC_STATION_ID =
+PB2NC_MESSAGE_TYPE =
+
+# Leave empty to process all
+# PB2NC_OBS_BUFR_VAR_LIST = POB, QOB, TOB, ZOB, UOB, VOB, PMO, TOCC, TDO, HOVI, D_DPT, D_WDIR, D_WIND, D_RH, D_MIXR, D_PRMSL
+PB2NC_OBS_BUFR_VAR_LIST = POB, QOB, TOB, ZOB, UOB, VOB, D_RH, D_CAPE, D_PBL
+
+# For defining the time periods for summarization
+# False for no time summary, True otherwise
+# The rest of the PB2NC_TIME_SUMMARY variables are ignored if set to False
+PB2NC_TIME_SUMMARY_FLAG = False
+PB2NC_TIME_SUMMARY_BEG = 000000
+PB2NC_TIME_SUMMARY_END = 235959
+PB2NC_TIME_SUMMARY_VAR_NAMES = PMO,TOB,TDO,UOB,VOB,PWO,TOCC
+PB2NC_TIME_SUMMARY_TYPES = min, max, range, mean, stdev, median, p80
+
+# Location of MET config file to pass to PointStat
+# References PARM_BASE which is the location of the parm directory corresponding
+# to the ush directory of the master_metplus.py script that is called
+# or the value of the environment variable METPLUS_PARM_BASE if set
+POINT_STAT_CONFIG_FILE = {CONFIG_DIR}/PointStatConfig_upper_air
+
+# Model/fcst and obs name, e.g. GFS, NAM, GDAS, etc.
+MODEL = {ENV[MODEL]}
+OBTYPE = NDAS
+
+# Regrid to specified grid. Indicate NONE if no regridding, or the grid id
+POINT_STAT_REGRID_TO_GRID = NONE
+
+POINT_STAT_OUTPUT_PREFIX = {MODEL}_{OBTYPE}_ADPUPA
+
+# Verification Masking regions
+# Indicate which grid and polygon masking region, if applicable
+POINT_STAT_GRID =
+
+# List of full path to poly masking files. NOTE: Only short lists of poly
+# files work (those that fit on one line), a long list will result in an
+# environment variable that is too long, resulting in an error. For long
+# lists of poly masking files (i.e. all the mask files in the NCEP_mask
+# directory), define these in the MET point_stat configuration file.
+POINT_STAT_POLY = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+POINT_STAT_STATION_ID =
+
+# Message types, if all message types are to be returned, leave this empty,
+# otherwise indicate the message types of interest.
+POINT_STAT_MESSAGE_TYPE = ADPUPA
+
+# Variables and levels as specified in the field dictionary of the MET
+# point_stat configuration file. Specify as FCST_VARn_NAME, FCST_VARn_LEVELS,
+# (optional) FCST_VARn_OPTION
+BOTH_VAR1_NAME = TMP
+BOTH_VAR1_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
+
+BOTH_VAR2_NAME = RH
+BOTH_VAR2_LEVELS = P1000, P925, P850, P700, P500, P400, P300
+
+BOTH_VAR3_NAME = UGRD
+BOTH_VAR3_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
+BOTH_VAR3_THRESH = >=2.572 ;; m/s or 5kts
+
+BOTH_VAR4_NAME = VGRD
+BOTH_VAR4_LEVELS = P1000, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
+BOTH_VAR4_THRESH = >=2.572 ;; m/s or 5kts
+
+BOTH_VAR5_NAME = HGT
+BOTH_VAR5_LEVELS = P1000, P950, P925, P850, P700, P500, P400, P300, P250, P200, P150, P100, P50, P20, P10
+
+FCST_VAR6_NAME = CAPE
+FCST_VAR6_LEVELS = L0
+FCST_VAR6_OPTIONS = cnt_thresh = [ >0 ];
+FCST_VAR6_THRESH = >500, >1000, >1500, >2000, >3000, >4000
+OBS_VAR6_NAME = CAPE
+OBS_VAR6_LEVELS = L0-100000
+OBS_VAR6_OPTIONS = cnt_thresh = [ >0 ]; cnt_logic = UNION;
+OBS_VAR6_THRESH = >500, >1000, >1500, >2000, >3000, >4000
+
+FCST_VAR7_NAME = HPBL
+FCST_VAR7_LEVELS = Z0
+OBS_VAR7_NAME = PBL
+OBS_VAR7_LEVELS = L0
+OBS_VAR7_OPTIONS = desc = "TKE";
+
+FCST_VAR8_NAME = HGT
+FCST_VAR8_LEVELS = L0
+FCST_VAR8_OPTIONS = GRIB_lvl_typ = 220;
+OBS_VAR8_NAME = PBL
+OBS_VAR8_LEVELS = L0
+OBS_VAR8_OPTIONS = desc = "RI";
diff --git a/ush/templates/parm/metplus/REFC.conf b/ush/templates/parm/metplus/REFC.conf
new file mode 100644
index 000000000..db4864043
--- /dev/null
+++ b/ush/templates/parm/metplus/REFC.conf
@@ -0,0 +1,94 @@
+# Composite Reflectivity Verification
+
+[dir]
+# Input and output data directories
+INPUT_BASE = {ENV[EXPTDIR]}/{ENV[CDATE]}/postprd
+FCST_GRID_STAT_INPUT_DIR = {INPUT_BASE}
+
+OUTPUT_BASE = {ENV[EXPTDIR]}
+GRID_STAT_OUTPUT_DIR = {OUTPUT_BASE}
+
+LOG_DIR = {OUTPUT_BASE}/log
+
+OBS_GRID_STAT_INPUT_DIR = {ENV[OBS_DIR]}
+
+[filename_templates]
+# format of filenames
+FCST_GRID_STAT_INPUT_TEMPLATE = {ENV[NET]}.t{init?fmt=%H}z.bgdawpf{lead?fmt=%HHH}.tm{init?fmt=%H}.grib2
+
+# ANLYS
+#OBS_GRID_STAT_INPUT_TEMPLATE = {OBS_GRID_STAT_INPUT_DIR}/{valid?fmt=%Y%m%d}/MergedReflectivityQComposite_00.00_{valid?fmt=%Y%m%d}-{valid?fmt=%H}0000.grib2
+OBS_GRID_STAT_INPUT_TEMPLATE = {valid?fmt=%Y%m%d}/MergedReflectivityQComposite_00.00_{valid?fmt=%Y%m%d}-{valid?fmt=%H}0000.grib2
+
+GRID_STAT_OUTPUT_TEMPLATE = {init?fmt=%Y%m%d%H}/metprd/grid_stat
+
+# Location of MET poly files
+GRID_STAT_VERIFICATION_MASK_TEMPLATE = {MET_INSTALL_DIR}/share/met/poly/CONUS.poly
+
+[config]
+# Model to verify
+MODEL = {ENV[MODEL]}
+FCST_NATIVE_DATA_TYPE = GRIB
+
+# Set obtype to vx
+OBTYPE = MRMS
+OBS_NATIVE_DATA_TYPE = GRIB
+
+# List of applications to run
+PROCESS_LIST = GridStat
+
+# location of grid_stat MET config file
+GRID_STAT_CONFIG_FILE = {CONFIG_DIR}/GridStatConfig_REFL
+GRID_STAT_OUTPUT_PREFIX = {MODEL}_{CURRENT_FCST_NAME}_{OBTYPE}
+
+# time looping - options are INIT, VALID, RETRO, and REALTIME
+LOOP_BY = INIT
+
+# Format of INIT_BEG and INIT_END
+INIT_TIME_FMT = %Y%m%d%H
+
+# Start time for METplus run
+INIT_BEG = {ENV[CDATE]}
+# End time for METplus run
+INIT_END = {ENV[CDATE]}
+# Increment between METplus runs in seconds. Must be > 60
+INIT_INCREMENT = 3600
+
+# list of forecast leads to process.
+#LEAD_SEQ = begin_end_incr(1,24,1)
+LEAD_SEQ = {ENV[fhr_list]}
+
+# Options are times, processes
+# times = run all items in the PROCESS_LIST for a single initialization
+# time, then repeat until all times have been evaluated.
+# processes = run each item in the PROCESS_LIST for all times
+# specified, then repeat for the next item in the PROCESS_LIST.
+LOOP_ORDER = times
+
+# run pcp_combine on forecast/obs data?
+FCST_PCP_COMBINE_RUN = False
+OBS_PCP_COMBINE_RUN = False
+
+# Set grid to verify on
+GRID_STAT_REGRID_TO_GRID = FCST
+
+# Forecast/Observation variable Information
+FCST_VAR1_NAME = REFC
+FCST_VAR1_LEVELS = L0
+
+BOTH_VAR1_THRESH = ge20, ge30, ge40, ge50
+
+OBS_VAR1_NAME = MergedReflectivityQComposite
+OBS_VAR1_LEVELS = L0
+OBS_VAR1_OPTIONS = censor_thresh = eq-999; censor_val = -9999;
+
+OBS_GRID_STAT_FILE_WINDOW_BEGIN = -300
+OBS_GRID_STAT_FILE_WINDOW_END = 300
+
+# Neighborhood shape and width
+GRID_STAT_NEIGHBORHOOD_SHAPE = SQUARE
+GRID_STAT_NEIGHBORHOOD_WIDTH = 1, 3, 5, 7
+
+# Forecast data description variables
+FCST_IS_PROB = False
+
diff --git a/ush/templates/parm/metplus/common.conf b/ush/templates/parm/metplus/common.conf
new file mode 100644
index 000000000..fcdeeb9c2
--- /dev/null
+++ b/ush/templates/parm/metplus/common.conf
@@ -0,0 +1,22 @@
+[dir]
+# Commonly used base METplus variables
+# Location of METplus code
+METPLUS_BASE = {ENV[METPLUS_PATH]}
+
+# Met install location
+MET_INSTALL_DIR = {ENV[MET_INSTALL_DIR]}
+MET_BASE = {MET_INSTALL_DIR}/share/met
+
+# Location of METplus parm directory
+METPLUS_PARM_BASE = {ENV[METPLUS_CONF]}
+
+# Location of configuration files used by MET applications
+CONFIG_DIR = {ENV[MET_CONFIG]}
+
+# Other directories
+TMP_DIR = /tmp
+
+[config]
+LOG_LEVEL = DEBUG
+LOG_MET_VERBOSITY = 2
+LOG_MET_OUTPUT_TO_METPLUS = yes
diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh
index dce4752a8..f9f4f7c6d 100644
--- a/ush/valid_param_vals.sh
+++ b/ush/valid_param_vals.sh
@@ -46,6 +46,8 @@ valid_vals_WRTCMP_output_grid=("rotated_latlon" "lambert_conformal" "regional_la
valid_vals_RUN_TASK_MAKE_GRID=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
valid_vals_RUN_TASK_MAKE_OROG=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
valid_vals_RUN_TASK_MAKE_SFC_CLIMO=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
+valid_vals_RUN_TASK_VX_GRIDSTAT=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
+valid_vals_RUN_TASK_VX_POINTSTAT=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
valid_vals_QUILTING=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
valid_vals_PRINT_ESMF=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")
valid_vals_USE_CRON_TO_RELAUNCH=("TRUE" "true" "YES" "yes" "FALSE" "false" "NO" "no")