From 6bd798fedd3e05f413cdb4f2c8325373ab3193a4 Mon Sep 17 00:00:00 2001 From: danielabdi-noaa <52012304+danielabdi-noaa@users.noreply.github.com> Date: Mon, 18 Jul 2022 11:10:30 -0600 Subject: [PATCH] [develop]: Remove shell workflow. (#764) * Bug fix with FIELD_TABLE_FN * Modify crontab management, use config_defaults.sh. * Add status badge. * Update cheyenne crontab management. * source lmod-setup * Add main to set_predef_grid * Bug fix in predef_grid * Don't import dead params. * Fix bug in resetting VERBOSE * Minor fix in INI config. * Construct var_defns components from dictionary. * Allow also lower case variables to be exported. * Updates to python workflow due to PR #776 * Use python versions of link_fix and set_FV3_sfc in job script. * Use python versions of create_diag/model. * Some fixes addressing Christina's suggestions. * Delete shell workflow * Append pid to temp files. * Update scripts to work with the latest hashes of UFS_UTILS and UPP (#775) * update input namelist of chgres_cube * update diag_table templates * update scripts * back to original * specify miniconda version on Jet * Remove -S option from link_fix call. * Fixes due to merge * Cosmetic changes. 
Co-authored-by: Chan-Hoo.Jeon-NOAA <60152248+chan-hoo@users.noreply.github.com> --- README.md | 2 + scripts/exregional_make_grid.sh | 12 +- scripts/exregional_make_orog.sh | 14 +- scripts/exregional_make_sfc_climo.sh | 14 +- scripts/exregional_run_fcst.sh | 32 +- .../WE2E/get_WE2Etest_names_subdirs_descs.sh | 152 +- tests/WE2E/run_WE2E_tests.sh | 2 +- tests/WE2E/setup_WE2E_tests.sh | 1 + ush/calculate_cost.py | 106 + ush/check_ruc_lsm.sh | 120 - ush/config_defaults.sh | 11 +- ush/config_defaults.yaml | 2017 ------------ ush/constants.py | 27 - ush/constants.sh | 6 +- ush/create_diag_table_file.py | 29 +- ush/create_diag_table_file.sh | 117 - ush/create_model_configure_file.py | 59 +- ush/create_model_configure_file.sh | 257 -- ush/generate_FV3LAM_wflow.py | 74 +- ush/generate_FV3LAM_wflow.sh | 1213 ------- ush/get_crontab_contents.py | 126 +- ush/get_crontab_contents.sh | 74 - ush/launch_FV3LAM_wflow.sh | 34 +- ush/link_fix.py | 46 +- ush/link_fix.sh | 460 --- ush/predef_grid_params.yaml | 43 +- ush/python_utils/__init__.py | 47 +- ush/python_utils/config_parser.py | 23 +- ush/python_utils/environment.py | 25 +- ush/python_utils/misc.py | 18 + ush/set_FV3nml_ens_stoch_seeds.py | 39 +- ush/set_FV3nml_ens_stoch_seeds.sh | 187 -- ush/set_FV3nml_sfc_climo_filenames.py | 23 +- ush/set_FV3nml_sfc_climo_filenames.sh | 231 -- ush/set_cycle_dates.sh | 145 - ush/set_extrn_mdl_params.sh | 35 - ush/set_gridparams_ESGgrid.py | 33 +- ush/set_gridparams_ESGgrid.sh | 215 -- ush/set_gridparams_GFDLgrid.py | 2 - ush/set_gridparams_GFDLgrid.sh | 571 ---- ush/set_ozone_param.sh | 241 -- ush/set_predef_grid_params.py | 23 +- ush/set_predef_grid_params.sh | 1765 ---------- ush/set_thompson_mp_fix_files.sh | 192 -- ush/setup.py | 364 +-- ush/setup.sh | 2853 ----------------- ush/valid_param_vals.sh | 59 - ush/valid_param_vals.yaml | 2 +- 48 files changed, 714 insertions(+), 11427 deletions(-) create mode 100755 ush/calculate_cost.py delete mode 100644 ush/check_ruc_lsm.sh delete mode 
100644 ush/config_defaults.yaml delete mode 100644 ush/constants.py delete mode 100644 ush/create_diag_table_file.sh delete mode 100644 ush/create_model_configure_file.sh delete mode 100755 ush/generate_FV3LAM_wflow.sh delete mode 100644 ush/get_crontab_contents.sh mode change 100644 => 100755 ush/link_fix.py delete mode 100755 ush/link_fix.sh delete mode 100644 ush/set_FV3nml_ens_stoch_seeds.sh delete mode 100644 ush/set_FV3nml_sfc_climo_filenames.sh delete mode 100644 ush/set_cycle_dates.sh delete mode 100644 ush/set_extrn_mdl_params.sh delete mode 100644 ush/set_gridparams_ESGgrid.sh delete mode 100644 ush/set_gridparams_GFDLgrid.sh delete mode 100644 ush/set_ozone_param.sh delete mode 100644 ush/set_predef_grid_params.sh delete mode 100644 ush/set_thompson_mp_fix_files.sh delete mode 100755 ush/setup.sh delete mode 100644 ush/valid_param_vals.sh diff --git a/README.md b/README.md index de61c44209..e0b3e9bd52 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +[![Python unittests](https://github.com/ufs-community/regional_workflow/actions/workflows/python_unittests.yaml/badge.svg)](https://github.com/ufs-community/regional_workflow/actions/workflows/python_unittests.yaml) + # Regional Workflow **The regional workflow in this repository can no longer be run in a stand-alone configuration. To clone the end-to-end system, build the code, and run the workflow, see:** diff --git a/scripts/exregional_make_grid.sh b/scripts/exregional_make_grid.sh index de27902e32..929be84dc5 100755 --- a/scripts/exregional_make_grid.sh +++ b/scripts/exregional_make_grid.sh @@ -17,8 +17,6 @@ #----------------------------------------------------------------------- # . $USHDIR/make_grid_mosaic_file.sh -. $USHDIR/link_fix.sh -. $USHDIR/set_FV3nml_sfc_climo_filenames.sh # #----------------------------------------------------------------------- # @@ -589,9 +587,9 @@ halo failed." 
# #----------------------------------------------------------------------- # -link_fix \ - verbose="$VERBOSE" \ - file_group="grid" || \ +python3 $USHDIR/link_fix.py \ + --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ + --file-group "grid" || \ print_err_msg_exit "\ Call to function to create symlinks to the various grid and mosaic files failed." @@ -607,7 +605,9 @@ failed." # #----------------------------------------------------------------------- # -set_FV3nml_sfc_climo_filenames || print_err_msg_exit "\ +python3 $USHDIR/set_FV3nml_sfc_climo_filenames.py \ + --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ + || print_err_msg_exit "\ Call to function to set surface climatology file names in the FV3 namelist file failed." # diff --git a/scripts/exregional_make_orog.sh b/scripts/exregional_make_orog.sh index 58e453a97a..3904924433 100755 --- a/scripts/exregional_make_orog.sh +++ b/scripts/exregional_make_orog.sh @@ -12,14 +12,6 @@ # #----------------------------------------------------------------------- # -# Source other necessary files. -# -#----------------------------------------------------------------------- -# -. $USHDIR/link_fix.sh -# -#----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. # @@ -581,9 +573,9 @@ cd_vrfy - # #----------------------------------------------------------------------- # -link_fix \ - verbose="$VERBOSE" \ - file_group="orog" || \ +python3 $USHDIR/link_fix.py \ + --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ + --file-group "orog" || \ print_err_msg_exit "\ Call to function to create links to orography files failed." 
# diff --git a/scripts/exregional_make_sfc_climo.sh b/scripts/exregional_make_sfc_climo.sh index 129cfcb75b..995b17f305 100755 --- a/scripts/exregional_make_sfc_climo.sh +++ b/scripts/exregional_make_sfc_climo.sh @@ -12,14 +12,6 @@ # #----------------------------------------------------------------------- # -# Source other necessary files. -# -#----------------------------------------------------------------------- -# -. $USHDIR/link_fix.sh -# -#----------------------------------------------------------------------- -# # Save current shell options (in a global array). Then set new options # for this script/function. # @@ -241,9 +233,9 @@ esac # #----------------------------------------------------------------------- # -link_fix \ - verbose="$VERBOSE" \ - file_group="sfc_climo" || \ +python3 $USHDIR/link_fix.py \ + --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ + --file-group "sfc_climo" || \ print_err_msg_exit "\ Call to function to create links to surface climatology files failed." # diff --git a/scripts/exregional_run_fcst.sh b/scripts/exregional_run_fcst.sh index 8593f9c536..18b05d5c3c 100755 --- a/scripts/exregional_run_fcst.sh +++ b/scripts/exregional_run_fcst.sh @@ -9,16 +9,6 @@ # . ${GLOBAL_VAR_DEFNS_FP} . $USHDIR/source_util_funcs.sh -. $USHDIR/set_FV3nml_ens_stoch_seeds.sh -# -#----------------------------------------------------------------------- -# -# Source other necessary files. -# -#----------------------------------------------------------------------- -# -. $USHDIR/create_model_configure_file.sh -. 
$USHDIR/create_diag_table_file.sh # #----------------------------------------------------------------------- # @@ -446,7 +436,9 @@ fi if [ "${DO_ENSEMBLE}" = TRUE ] && ([ "${DO_SPP}" = TRUE ] || [ "${DO_SPPT}" = TRUE ] || [ "${DO_SHUM}" = TRUE ] \ [ "${DO_SKEB}" = TRUE ] || [ "${DO_LSM_SPP}" = TRUE ]); then - set_FV3nml_ens_stoch_seeds cdate="$cdate" || print_err_msg_exit "\ + python3 $USHDIR/set_FV3nml_ens_stoch_seeds.py \ + --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ + --cdate "$cdate" || print_err_msg_exit "\ Call to function to create the ensemble-based namelist for the current cycle's (cdate) run directory (run_dir) failed: cdate = \"${cdate}\" @@ -464,12 +456,13 @@ fi # #----------------------------------------------------------------------- # -create_model_configure_file \ - cdate="$cdate" \ - run_dir="${run_dir}" \ - sub_hourly_post="${SUB_HOURLY_POST}" \ - dt_subhourly_post_mnts="${DT_SUBHOURLY_POST_MNTS}" \ - dt_atmos="${DT_ATMOS}" || print_err_msg_exit "\ +python3 $USHDIR/create_model_configure_file.py \ + --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ + --cdate "$cdate" \ + --run-dir "${run_dir}" \ + --sub-hourly-post "${SUB_HOURLY_POST}" \ + --dt-subhourly-post-mnts "${DT_SUBHOURLY_POST_MNTS}" \ + --dt-atmos "${DT_ATMOS}" || print_err_msg_exit "\ Call to function to create a model configuration file for the current cycle's (cdate) run directory (run_dir) failed: cdate = \"${cdate}\" @@ -482,8 +475,9 @@ cycle's (cdate) run directory (run_dir) failed: # #----------------------------------------------------------------------- # -create_diag_table_file \ - run_dir="${run_dir}" || print_err_msg_exit "\ +python3 $USHDIR/create_diag_table_file.py \ + --path-to-defns ${GLOBAL_VAR_DEFNS_FP} \ + --run-dir "${run_dir}" || print_err_msg_exit "\ Call to function to create a diag table file for the current cycle's (cdate) run directory (run_dir) failed: run_dir = \"${run_dir}\"" diff --git a/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh 
b/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh index 39dc68f516..23ef0500c2 100755 --- a/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh +++ b/tests/WE2E/get_WE2Etest_names_subdirs_descs.sh @@ -438,9 +438,6 @@ function get_WE2Etest_names_subdirs_descs() { # ushdir=$( readlink -f "$WE2Edir/../../ush" ) . $ushdir/constants.sh - . $ushdir/set_predef_grid_params.sh - . $ushdir/set_gridparams_GFDLgrid.sh - . $ushdir/set_gridparams_ESGgrid.sh # #----------------------------------------------------------------------- # @@ -1220,70 +1217,16 @@ ${test_desc}${stripped_line} " # parameters. The way the latter are obtained depends on whether or not # a predefined grid is being used. # -# If using a predefined grid, call the set_predef_grid_params() function -# to get the grid parameters. -# - if [ ! -z "${PREDEF_GRID_NAME}" ]; then -# -# Note: -# Can set "quilting" to "FALSE" in the following argument list because -# the write-component parameters are not needed below; only those of the -# native grid are needed. 
-# - set_predef_grid_params \ - predef_grid_name="${PREDEF_GRID_NAME}" \ - quilting="FALSE" \ - outvarname_grid_gen_method="grid_gen_method" \ - outvarname_esggrid_lon_ctr="esggrid_lon_ctr" \ - outvarname_esggrid_lat_ctr="esggrid_lat_ctr" \ - outvarname_esggrid_delx="esggrid_delx" \ - outvarname_esggrid_dely="esggrid_dely" \ - outvarname_esggrid_nx="esggrid_nx" \ - outvarname_esggrid_ny="esggrid_ny" \ - outvarname_esggrid_pazi="esggrid_pazi" \ - outvarname_esggrid_wide_halo_width="esggrid_wide_halo_width" \ - outvarname_gfdlgrid_lon_t6_ctr="gfdlgrid_lon_t6_ctr" \ - outvarname_gfdlgrid_lat_t6_ctr="gfdlgrid_lat_t6_ctr" \ - outvarname_gfdlgrid_stretch_fac="gfdlgrid_stretch_fac" \ - outvarname_gfdlgrid_num_cells="gfdlgrid_num_cells" \ - outvarname_gfdlgrid_refine_ratio="gfdlgrid_refine_ratio" \ - outvarname_gfdlgrid_istart_of_rgnl_dom_on_t6g="gfdlgrid_istart_of_rgnl_dom_on_t6g" \ - outvarname_gfdlgrid_iend_of_rgnl_dom_on_t6g="gfdlgrid_iend_of_rgnl_dom_on_t6g" \ - outvarname_gfdlgrid_jstart_of_rgnl_dom_on_t6g="gfdlgrid_jstart_of_rgnl_dom_on_t6g" \ - outvarname_gfdlgrid_jend_of_rgnl_dom_on_t6g="gfdlgrid_jend_of_rgnl_dom_on_t6g" \ - outvarname_dt_atmos="dta" -# -# If using a custom grid, the test's configuration file should contain -# the grid parameters. Source this file and set the values of the grid -# parameters it contains to local variables. -# - else - - . 
./${config_fn} - grid_gen_method="${GRID_GEN_METHOD}" - if [ "${grid_gen_method}" = "GFDLgrid" ]; then - gfdlgrid_lon_t6_ctr="${GFDLgrid_LON_T6_CTR}" - gfdlgrid_lat_t6_ctr="${GFDLgrid_LAT_T6_CTR}" - gfdlgrid_num_cells="${GFDLgrid_NUM_CELLS}" - gfdlgrid_stretch_fac="${GFDLgrid_STRETCH_FAC}" - gfdlgrid_refine_ratio="${GFDLgrid_REFINE_RATIO}" - gfdlgrid_istart_of_rgnl_dom_on_t6g="${GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G}" - gfdlgrid_iend_of_rgnl_dom_on_t6g="${GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G}" - gfdlgrid_jstart_of_rgnl_dom_on_t6g="${GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G}" - gfdlgrid_jend_of_rgnl_dom_on_t6g="${GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G}" - elif [ "${grid_gen_method}" = "ESGgrid" ]; then - esggrid_lon_ctr="${ESGgrid_LON_CTR}" - esggrid_lat_ctr="${ESGgrid_LAT_CTR}" - esggrid_delx="${ESGgrid_DELX}" - esggrid_dely="${ESGgrid_DELY}" - esggrid_nx="${ESGgrid_NX}" - esggrid_ny="${ESGgrid_NY}" - esggrid_pazi="${ESGgrid_PAZI}" - esggrid_wide_halo_width="${ESGgrid_WIDE_HALO_WIDTH}" - fi - dta="${DT_ATMOS}" +params=$(\ + PREDEF_GRID_NAME="${PREDEF_GRID_NAME}" \ + QUILTING="FALSE" \ + RADIUS_EARTH=${RADIUS_EARTH} \ + DEGS_PER_RADIAN=${DEGS_PER_RADIAN} \ + NH4=${NH4} \ + $ushdir/calculate_cost.py -c "${test_configs_basedir}/$subdir/${config_fn}") + +read dta nxny dta_r nxny_r <<< "${params}" - fi # # Save the value of dta (which is just dt_atmos) in an array. The single # quote at the beginning forces Google Sheets to interpret this quantity @@ -1292,49 +1235,9 @@ ${test_desc}${stripped_line} " # prim_test_dt_atmos+=( "'${dta}" ) # -# The way the number of grid points in the horizontal directions (nx and -# ny) are calculated depends on the method used to generate the grid as -# well as the grid parameters for that method. -# - if [ "${grid_gen_method}" = "GFDLgrid" ]; then -# -# Note: -# The workflow generation mode (run_envir) can be set to "community" here -# since this does not affect the values of nx and ny. 
-# - set_gridparams_GFDLgrid \ - lon_of_t6_ctr="${gfdlgrid_lon_t6_ctr}" \ - lat_of_t6_ctr="${gfdlgrid_lat_t6_ctr}" \ - res_of_t6g="${gfdlgrid_num_cells}" \ - stretch_factor="${gfdlgrid_stretch_fac}" \ - refine_ratio_t6g_to_t7g="${gfdlgrid_refine_ratio}" \ - istart_of_t7_on_t6g="${gfdlgrid_istart_of_rgnl_dom_on_t6g}" \ - iend_of_t7_on_t6g="${gfdlgrid_iend_of_rgnl_dom_on_t6g}" \ - jstart_of_t7_on_t6g="${gfdlgrid_jstart_of_rgnl_dom_on_t6g}" \ - jend_of_t7_on_t6g="${gfdlgrid_jend_of_rgnl_dom_on_t6g}" \ - verbose="$verbose" \ - outvarname_nx_of_t7_on_t7g="nx" \ - outvarname_ny_of_t7_on_t7g="ny" - - elif [ "${grid_gen_method}" = "ESGgrid" ]; then - - set_gridparams_ESGgrid \ - lon_ctr="${esggrid_lon_ctr}" \ - lat_ctr="${esggrid_lat_ctr}" \ - nx="${esggrid_nx}" \ - ny="${esggrid_ny}" \ - pazi="${esggrid_pazi}" \ - halo_width="${esggrid_wide_halo_width}" \ - delx="${esggrid_delx}" \ - dely="${esggrid_dely}" \ - outvarname_nx="nx" \ - outvarname_ny="ny" - - fi -# # Calculate the total number of horizontal grid points. # - num_grid_pts=$(( nx*ny )) + num_grid_pts=$nxny # # Calculate the number of time steps for the test. Note that FCST_LEN_HRS # is in units of hours while dta is in units of seconds. Also, the factor @@ -1348,30 +1251,6 @@ ${test_desc}${stripped_line} " # ac=$(( num_grid_pts*num_time_steps*nf )) # -# Unset all grid paramters so that they are not accidentally reused for -# the next test. -# - unset gfdlgrid_lon_t6_ctr \ - gfdlgrid_lat_t6_ctr \ - gfdlgrid_num_cells \ - gfdlgrid_stretch_fac \ - gfdlgrid_refine_ratio \ - gfdlgrid_istart_of_rgnl_dom_on_t6g \ - gfdlgrid_iend_of_rgnl_dom_on_t6g \ - gfdlgrid_jstart_of_rgnl_dom_on_t6g \ - gfdlgrid_jend_of_rgnl_dom_on_t6g \ - esggrid_lon_ctr \ - esggrid_lat_ctr \ - esggrid_nx \ - esggrid_ny \ - esggrid_pazi \ - esggrid_wide_halo_width \ - esggrid_delx \ - esggrid_dely \ - dta \ - nx \ - ny -# # Save the absolute cost for this test in the array that will eventually # contain the relative cost. 
The values in this array will be divided # by abs_cost_ref later below to obtain relative costs. @@ -1397,16 +1276,9 @@ ${test_desc}${stripped_line} " # #----------------------------------------------------------------------- # - set_predef_grid_params \ - predef_grid_name="RRFS_CONUS_25km" \ - quilting="FALSE" \ - outvarname_esggrid_nx="nx" \ - outvarname_esggrid_ny="ny" \ - outvarname_dt_atmos="dta" - - num_grid_pts=$(( nx*ny )) + num_grid_pts=$nxny_r fcst_len_hrs="6" - num_time_steps=$(( (fcst_len_hrs*3600 + dta - 1)/dta )) + num_time_steps=$(( (fcst_len_hrs*3600 + dta_r - 1)/dta_r )) abs_cost_ref=$(( num_grid_pts*num_time_steps )) for (( i=0; i<=$((num_prim_tests-1)); i++ )); do diff --git a/tests/WE2E/run_WE2E_tests.sh b/tests/WE2E/run_WE2E_tests.sh index b2c8c537c7..605a9178a3 100755 --- a/tests/WE2E/run_WE2E_tests.sh +++ b/tests/WE2E/run_WE2E_tests.sh @@ -1281,7 +1281,7 @@ exist or is not a directory: # #----------------------------------------------------------------------- # - $ushdir/generate_FV3LAM_wflow.sh || \ + $ushdir/generate_FV3LAM_wflow.py || \ print_err_msg_exit "\ Could not generate an experiment for the test specified by test_name: test_name = \"${test_name}\"" diff --git a/tests/WE2E/setup_WE2E_tests.sh b/tests/WE2E/setup_WE2E_tests.sh index de6cdf0c24..5b084c2fc6 100755 --- a/tests/WE2E/setup_WE2E_tests.sh +++ b/tests/WE2E/setup_WE2E_tests.sh @@ -79,6 +79,7 @@ exec_subdir='bin_intel/bin' env_path="${SRW_APP_DIR}/modulefiles" env_file="wflow_${machine}" echo "-- Load environment =>" $env_file +source ${SRW_APP_DIR}/etc/lmod-setup.sh ${machine} module use ${env_path} module load ${env_file} conda activate regional_workflow diff --git a/ush/calculate_cost.py b/ush/calculate_cost.py new file mode 100755 index 0000000000..010b892232 --- /dev/null +++ b/ush/calculate_cost.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 + +import os +import unittest +import argparse + +from python_utils import set_env_var, import_vars, export_vars, load_config_file, 
flatten_dict + +from set_predef_grid_params import set_predef_grid_params +from set_gridparams_ESGgrid import set_gridparams_ESGgrid +from set_gridparams_GFDLgrid import set_gridparams_GFDLgrid + +def calculate_cost(config_fn): + global PREDEF_GRID_NAME, QUILTING, GRID_GEN_METHOD + + #import all environment variables + import_vars() + + #get grid config parameters (predefined or custom) + if PREDEF_GRID_NAME: + set_env_var('QUILTING',False) + set_predef_grid_params() + import_vars() + else: + cfg_u = load_config_file(config_fn) + cfg_u = flatten_dict(cfg_u) + import_vars(dictionary=cfg_u) + + #number of gridpoints (nx*ny) depends on grid generation method + if GRID_GEN_METHOD == "GFDLgrid": + (\ + LON_CTR,LAT_CTR,NX,NY,NHW,STRETCH_FAC, + ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG, + IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG, + JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG, + JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG \ + ) = \ + set_gridparams_GFDLgrid( \ + lon_of_t6_ctr=GFDLgrid_LON_T6_CTR, \ + lat_of_t6_ctr=GFDLgrid_LAT_T6_CTR, \ + res_of_t6g=GFDLgrid_NUM_CELLS, \ + stretch_factor=GFDLgrid_STRETCH_FAC, \ + refine_ratio_t6g_to_t7g=GFDLgrid_REFINE_RATIO, \ + istart_of_t7_on_t6g=GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G, \ + iend_of_t7_on_t6g=GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G, \ + jstart_of_t7_on_t6g=GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G, \ + jend_of_t7_on_t6g=GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G) + + elif GRID_GEN_METHOD == "ESGgrid": + (\ + LON_CTR,LAT_CTR,NX,NY,PAZI, + NHW,STRETCH_FAC,DEL_ANGLE_X_SG,DEL_ANGLE_Y_SG, + NEG_NX_OF_DOM_WITH_WIDE_HALO, + NEG_NY_OF_DOM_WITH_WIDE_HALO \ + ) = \ + set_gridparams_ESGgrid( \ + lon_ctr=ESGgrid_LON_CTR, \ + lat_ctr=ESGgrid_LAT_CTR, \ + nx=ESGgrid_NX, \ + ny=ESGgrid_NY, \ + pazi=ESGgrid_PAZI, \ + halo_width=ESGgrid_WIDE_HALO_WIDTH, \ + delx=ESGgrid_DELX, \ + dely=ESGgrid_DELY) + + cost = [DT_ATMOS, NX*NY] + + #reference grid (6-hour forecast on RRFS_CONUS_25km) + PREDEF_GRID_NAME="RRFS_CONUS_25km" + + export_vars() + set_predef_grid_params() + 
import_vars() + cost.extend([DT_ATMOS, ESGgrid_NX*ESGgrid_NY]) + + return cost + +#interface +if __name__ == "__main__": + parser = argparse.ArgumentParser(description=\ + 'Calculates parameters needed for calculating cost.') + parser.add_argument('--cfg','-c',dest='cfg',required=True, + help='config file containing grip params') + args = parser.parse_args() + + params = calculate_cost(args.cfg) + print(' '.join(map(str,params))) + +class Testing(unittest.TestCase): + def test_calculate_cost(self): + USHDIR = os.path.dirname(os.path.abspath(__file__)) + config_fn = os.path.join(USHDIR, "config.community.sh") + params = calculate_cost(config_fn) + self.assertCountEqual(params, [36, 1987440, 36, 28689]) + + def setUp(self): + set_env_var('DEBUG',False) + set_env_var('PREDEF_GRID_NAME',"RRFS_CONUS_3km") + set_env_var('DT_ATMOS',36) + set_env_var('LAYOUT_X',18) + set_env_var('LAYOUT_Y',36) + set_env_var('BLOCKSIZE',28) + set_env_var('QUILTING',False) + set_env_var('RADIUS_EARTH',6371200.0) + set_env_var('DEGS_PER_RADIAN',57.2957795131) + diff --git a/ush/check_ruc_lsm.sh b/ush/check_ruc_lsm.sh deleted file mode 100644 index 35e4db195f..0000000000 --- a/ush/check_ruc_lsm.sh +++ /dev/null @@ -1,120 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that checks whether the RUC land surface -# model (LSM) parameterization is being called by the selected physics -# suite. If so, it sets the variable ruc_lsm used to "TRUE". If not, -# it sets this variable to "FALSE". It then "returns" this variable, -# i.e. it sets the environment variable whose name is specified by the -# input argument output_varname_sdf_uses_ruc_lsm to whatever sdf_uses_ruc_lsm -# is set to. -# -#----------------------------------------------------------------------- -# -function check_ruc_lsm() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). 
Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "ccpp_phys_suite_fp" \ - "output_varname_sdf_uses_ruc_lsm" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. 
-# -#----------------------------------------------------------------------- -# - local ruc_lsm_name \ - regex_search \ - ruc_lsm_name_or_null \ - sdf_uses_ruc_lsm -# -#----------------------------------------------------------------------- -# -# Check the suite definition file to see whether the Thompson microphysics -# parameterization is being used. -# -#----------------------------------------------------------------------- -# - ruc_lsm_name="lsm_ruc" - regex_search="^[ ]*(${ruc_lsm_name})<\/scheme>[ ]*$" - ruc_lsm_name_or_null=$( $SED -r -n -e "s/${regex_search}/\1/p" "${ccpp_phys_suite_fp}" ) - - if [ "${ruc_lsm_name_or_null}" = "${ruc_lsm_name}" ]; then - sdf_uses_ruc_lsm="TRUE" - elif [ -z "${ruc_lsm_name_or_null}" ]; then - sdf_uses_ruc_lsm="FALSE" - else - print_err_msg_exit "\ -Unexpected value returned for ruc_lsm_name_or_null: - ruc_lsm_name_or_null = \"${ruc_lsm_name_or_null}\" -This variable should be set to either \"${ruc_lsm_name}\" or an empty -string." - fi -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - eval ${output_varname_sdf_uses_ruc_lsm}="${sdf_uses_ruc_lsm}" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/config_defaults.sh b/ush/config_defaults.sh index 6790d66861..6c9dc7b032 100644 --- a/ush/config_defaults.sh +++ b/ush/config_defaults.sh @@ -4,7 +4,7 @@ # This file sets the experiment's configuration variables (which are # global shell variables) to their default values. For many of these # variables, the valid values that they may take on are defined in the -# file $USHDIR/valid_param_vals.sh. +# file $USHDIR/valid_param_vals.py. 
# #----------------------------------------------------------------------- # @@ -45,7 +45,7 @@ RUN_ENVIR="nco" # supported platform, and you want to use the Rocoto workflow manager, # you will need set MACHINE="linux" and WORKFLOW_MANAGER="rocoto". This # combination will assume a Slurm batch manager when generating the XML. -# Please see ush/valid_param_vals.sh for a full list of supported +# Please see ush/valid_param_vals.py for a full list of supported # platforms. # # MACHINE_FILE: @@ -623,7 +623,7 @@ NDAS_OBS_DIR="" # conditions from the GFS started 6 hours earlier, then # EXTRN_MDL_LBCS_OFFSET_HRS=6. # Note: the default value is model-dependent and set in -# set_extrn_mdl_params.sh +# set_extrn_mdl_params.py # # FV3GFS_FILE_FMT_ICS: # If using the FV3GFS model as the source of the ICs (i.e. if EXTRN_MDL_NAME_ICS @@ -1016,7 +1016,7 @@ GFDLgrid_USE_NUM_CELLS_IN_FILENAMES="" # in generating the files with 0-cell-, 3-cell-, and 4-cell-wide halos; # they are not needed by the forecast model. # NOTE: Probably don't need to make ESGgrid_WIDE_HALO_WIDTH a user-specified -# variable. Just set it in the function set_gridparams_ESGgrid.sh. +# variable. Just set it in the function set_gridparams_ESGgrid.py. # # Note that: # @@ -1176,7 +1176,7 @@ WRTCMP_dy="" # commonly used set of grid-dependent parameters. The predefined grid # parameters are specified in the script # -# $HOMErrfs/ush/set_predef_grid_params.sh +# $HOMErrfs/ush/set_predef_grid_params.py # #----------------------------------------------------------------------- # @@ -1914,6 +1914,7 @@ LSM_SPP_LSCALE=( "150000" "150000" "150000" "150000" "150000" "150000" "150000" ISEED_LSM_SPP=( "9" ) LSM_SPP_VAR_LIST=( "smc" "vgf" "alb" "sal" "emi" "zol" "stc" ) LSM_SPP_MAG_LIST=( "0.017" "0.001" "0.001" "0.001" "0.001" "0.001" "0.2" ) +LSM_SPP_EACH_STEP="true" #Sets lndp_each_step=.true. 
# #----------------------------------------------------------------------- # diff --git a/ush/config_defaults.yaml b/ush/config_defaults.yaml deleted file mode 100644 index 6498ecf013..0000000000 --- a/ush/config_defaults.yaml +++ /dev/null @@ -1,2017 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file sets the experiment's configuration variables (which are -# global shell variables) to their default values. For many of these -# variables, the valid values that they may take on are defined in the -# file $USHDIR/valid_param_vals.sh. -# -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Set the RUN_ENVIR variable that is listed and described in the WCOSS -# Implementation Standards document: -# -# NCEP Central Operations -# WCOSS Implementation Standards -# April 19, 2022 -# Version 11.0.0 -# -# RUN_ENVIR is described in this document as follows: -# -# Set to "nco" if running in NCO's production environment. Used to -# distinguish between organizations. -# -# Valid values are "nco" and "community". Here, we use it to generate -# and run the experiment either in NCO mode (if RUN_ENVIR is set to "nco") -# or in community mode (if RUN_ENVIR is set to "community"). This has -# implications on the experiment variables that need to be set and the -# the directory structure used. -# -#----------------------------------------------------------------------- -# -RUN_ENVIR: "nco" -# -#----------------------------------------------------------------------- -# -# mach_doc_start -# Set machine and queue parameters. Definitions: -# -# MACHINE: -# Machine on which the workflow will run. If you are NOT on a named, -# supported platform, and you want to use the Rocoto workflow manager, -# you will need set MACHINE: "linux" and WORKFLOW_MANAGER: "rocoto". 
This -# combination will assume a Slurm batch manager when generating the XML. -# Please see ush/valid_param_vals.sh for a full list of supported -# platforms. -# -# MACHINE_FILE: -# Path to a configuration file with machine-specific settings. If none -# is provided, setup.sh will attempt to set the path to for a supported -# platform. -# -# ACCOUNT: -# The account under which to submit jobs to the queue. -# -# WORKFLOW_MANAGER: -# The workflow manager to use (e.g. rocoto). This is set to "none" by -# default, but if the machine name is set to a platform that supports -# rocoto, this will be overwritten and set to "rocoto". If set -# explicitly to rocoto along with the use of the MACHINE=linux target, -# the configuration layer assumes a Slurm batch manager when generating -# the XML. Valid options: "rocoto" or "none" -# -# NCORES_PER_NODE: -# The number of cores available per node on the compute platform. Set -# for supported platforms in setup.sh, but is now also configurable for -# all platforms. -# -# LMOD_PATH: -# Path to the LMOD sh file on your Linux system. Is set automatically -# for supported machines. -# -# BUILD_MOD_FN: -# Name of alternative build module file to use if using an -# unsupported platform. Is set automatically for supported machines. -# -# WFLOW_MOD_FN: -# Name of alternative workflow module file to use if using an -# unsupported platform. Is set automatically for supported machines. -# -# SCHED: -# The job scheduler to use (e.g. slurm). Set this to an empty string in -# order for the experiment generation script to set it depending on the -# machine. -# -# PARTITION_DEFAULT: -# If using the slurm job scheduler (i.e. if SCHED is set to "slurm"), -# the default partition to which to submit workflow tasks. If a task -# does not have a specific variable that specifies the partition to which -# it will be submitted (e.g. PARTITION_HPSS, PARTITION_FCST; see below), -# it will be submitted to the partition specified by this variable. 
If -# this is not set or is set to an empty string, it will be (re)set to a -# machine-dependent value. This is not used if SCHED is not set to -# "slurm". -# -# QUEUE_DEFAULT: -# The default queue or QOS (if using the slurm job scheduler, where QOS -# is Quality of Service) to which workflow tasks are submitted. If a -# task does not have a specific variable that specifies the queue to which -# it will be submitted (e.g. QUEUE_HPSS, QUEUE_FCST; see below), it will -# be submitted to the queue specified by this variable. If this is not -# set or is set to an empty string, it will be (re)set to a machine- -# dependent value. -# -# PARTITION_HPSS: -# If using the slurm job scheduler (i.e. if SCHED is set to "slurm"), -# the partition to which the tasks that get or create links to external -# model files [which are needed to generate initial conditions (ICs) and -# lateral boundary conditions (LBCs)] are submitted. If this is not set -# or is set to an empty string, it will be (re)set to a machine-dependent -# value. This is not used if SCHED is not set to "slurm". -# -# QUEUE_HPSS: -# The queue or QOS to which the tasks that get or create links to external -# model files [which are needed to generate initial conditions (ICs) and -# lateral boundary conditions (LBCs)] are submitted. If this is not set -# or is set to an empty string, it will be (re)set to a machine-dependent -# value. -# -# PARTITION_FCST: -# If using the slurm job scheduler (i.e. if SCHED is set to "slurm"), -# the partition to which the task that runs forecasts is submitted. If -# this is not set or set to an empty string, it will be (re)set to a -# machine-dependent value. This is not used if SCHED is not set to -# "slurm". -# -# QUEUE_FCST: -# The queue or QOS to which the task that runs a forecast is submitted. -# If this is not set or set to an empty string, it will be (re)set to a -# machine-dependent value. 
-# -# mach_doc_end -# -#----------------------------------------------------------------------- -# -MACHINE: "BIG_COMPUTER" -MACHINE_FILE: "" -ACCOUNT: "project_name" -WORKFLOW_MANAGER: "none" -NCORES_PER_NODE: "" -LMOD_PATH: "" -BUILD_MOD_FN: "" -WFLOW_MOD_FN: "" -SCHED: "" -PARTITION_DEFAULT: "" -QUEUE_DEFAULT: "" -PARTITION_HPSS: "" -QUEUE_HPSS: "" -PARTITION_FCST: "" -QUEUE_FCST: "" -# -#----------------------------------------------------------------------- -# -# Set run commands for platforms without a workflow manager. These values -# will be ignored unless WORKFLOW_MANAGER: "none". Definitions: -# -# RUN_CMD_UTILS: -# The run command for pre-processing utilities (shave, orog, sfc_climo_gen, -# etc.) Can be left blank for smaller domains, in which case the executables -# will run without MPI. -# -# RUN_CMD_FCST: -# The run command for the model forecast step. This will be appended to -# the end of the variable definitions file, so it can reference other -# variables. -# -# RUN_CMD_POST: -# The run command for post-processing (UPP). Can be left blank for smaller -# domains, in which case UPP will run without MPI. -# -#----------------------------------------------------------------------- -# -RUN_CMD_UTILS: "mpirun -np 1" -RUN_CMD_FCST: "mpirun -np ${PE_MEMBER01}" -RUN_CMD_POST: "mpirun -np 1" -# -#----------------------------------------------------------------------- -# -# Set cron-associated parameters. Definitions: -# -# USE_CRON_TO_RELAUNCH: -# Flag that determines whether or not to add a line to the user's cron -# table to call the experiment launch script every CRON_RELAUNCH_INTVL_MNTS -# minutes. -# -# CRON_RELAUNCH_INTVL_MNTS: -# The interval (in minutes) between successive calls of the experiment -# launch script by a cron job to (re)launch the experiment (so that the -# workflow for the experiment kicks off where it left off). 
-# -#----------------------------------------------------------------------- -# -USE_CRON_TO_RELAUNCH: "FALSE" -CRON_RELAUNCH_INTVL_MNTS: "03" -# -#----------------------------------------------------------------------- -# -# dir_doc_start -# Set directories. Definitions: -# -# EXPT_BASEDIR: -# The base directory in which the experiment directory will be created. -# If this is not specified or if it is set to an empty string, it will -# default to ${HOMErrfs}/../expt_dirs. -# -# EXPT_SUBDIR: -# The name that the experiment directory (without the full path) will -# have. The full path to the experiment directory, which will be contained -# in the variable EXPTDIR, will be: -# -# EXPTDIR: "${EXPT_BASEDIR}/${EXPT_SUBDIR}" -# -# This cannot be empty. If set to a null string here, it must be set to -# a (non-empty) value in the user-defined experiment configuration file. -# -# dir_doc_end -# -# EXEC_SUBDIR: -# The name of the subdirectory of ufs-srweather-app where executables are -# installed. -#----------------------------------------------------------------------- -# -EXPT_BASEDIR: "" -EXPT_SUBDIR: "" -EXEC_SUBDIR: "bin" -# -#----------------------------------------------------------------------- -# -# Set variables that are only used in NCO mode (i.e. when RUN_ENVIR is -# set to "nco"). Definitions: -# -# COMIN: -# Directory containing files generated by the external model (FV3GFS, NAM, -# HRRR, etc) that the initial and lateral boundary condition generation tasks -# need in order to create initial and boundary condition files for a given -# cycle on the native FV3-LAM grid. -# -# envir, NET, model_ver, RUN: -# Standard environment variables defined in the NCEP Central Operations WCOSS -# Implementation Standards document as follows: -# -# envir: -# Set to "test" during the initial testing phase, "para" when running -# in parallel (on a schedule), and "prod" in production. 
-# -# NET: -# Model name (first level of com directory structure) -# -# model_ver: -# Version number of package in three digits (second level of com directory) -# -# RUN: -# Name of model run (third level of com directory structure). -# In general, same as $NET -# -# STMP: -# The beginning portion of the directory that will contain cycle-dependent -# model input files, symlinks to cycle-independent input files, and raw -# (i.e. before post-processing) forecast output files for a given cycle. -# For a cycle that starts on the date specified by yyyymmdd and hour -# specified by hh (where yyyymmdd and hh are as described above) [so that -# the cycle date (cdate) is given by cdate: "${yyyymmdd}${hh}"], the -# directory in which the aforementioned files will be located is: -# -# $STMP/tmpnwprd/$RUN/$cdate -# -# PTMP: -# The beginning portion of the directory that will contain the output -# files from the post-processor (UPP) for a given cycle. For a cycle -# that starts on the date specified by yyyymmdd and hour specified by hh -# (where yyyymmdd and hh are as described above), the directory in which -# the UPP output files will be placed will be: -# -# $PTMP/com/$NET/$model_ver/$RUN.$yyyymmdd/$hh -# -#----------------------------------------------------------------------- -# -COMIN: "/path/of/directory/containing/data/files/for/IC/LBCS" -envir: "para" -NET: "rrfs" -model_ver: "v1.0.0" -RUN: "rrfs" -STMP: "/base/path/of/directory/containing/model/input/and/raw/output/files" -PTMP: "/base/path/of/directory/containing/postprocessed/output/files" -# -#----------------------------------------------------------------------- -# -# Set the separator character(s) to use in the names of the grid, mosaic, -# and orography fixed files. -# -# Ideally, the same separator should be used in the names of these fixed -# files as the surface climatology fixed files (which always use a "." -# as the separator), i.e. ideally, DOT_OR_USCORE should be set to "." 
-# -#----------------------------------------------------------------------- -# -DOT_OR_USCORE: "_" -# -#----------------------------------------------------------------------- -# -# Set file names. Definitions: -# -# EXPT_CONFIG_FN: -# Name of the user-specified configuration file for the forecast experiment. -# -# RGNL_GRID_NML_FN: -# Name of file containing the namelist settings for the code that generates -# a "ESGgrid" type of regional grid. -# -# FV3_NML_BASE_SUITE_FN: -# Name of Fortran namelist file containing the forecast model's base suite -# namelist, i.e. the portion of the namelist that is common to all physics -# suites. -# -# FV3_NML_YAML_CONFIG_FN: -# Name of YAML configuration file containing the forecast model's namelist -# settings for various physics suites. -# -# FV3_NML_BASE_ENS_FN: -# Name of Fortran namelist file containing the forecast model's base -# ensemble namelist, i.e. the the namelist file that is the starting point -# from which the namelist files for each of the enesemble members are -# generated. -# -# FV3_EXEC_FN: -# Name to use for the forecast model executable when it is copied from -# the directory in which it is created in the build step to the executables -# directory (EXECDIR; this is set during experiment generation). -# -# DIAG_TABLE_TMPL_FN: -# Name of a template file that specifies the output fields of the forecast -# model (ufs-weather-model: diag_table) followed by [dot_ccpp_phys_suite]. -# Its default value is the name of the file that the ufs weather model -# expects to read in. -# -# FIELD_TABLE_TMPL_FN: -# Name of a template file that specifies the tracers in IC/LBC files of the -# forecast model (ufs-weather-mode: field_table) followed by [dot_ccpp_phys_suite]. -# Its default value is the name of the file that the ufs weather model expects -# to read in. 
-# -# MODEL_CONFIG_TMPL_FN: -# Name of a template file that contains settings and configurations for the -# NUOPC/ESMF main component (ufs-weather-model: model_config). Its default -# value is the name of the file that the ufs weather model expects to read in. -# -# NEMS_CONFIG_TMPL_FN: -# Name of a template file that contains information about the various NEMS -# components and their run sequence (ufs-weather-model: nems.configure). -# Its default value is the name of the file that the ufs weather model expects -# to read in. -# -# FCST_MODEL: -# Name of forecast model (default=ufs-weather-model) -# -# WFLOW_XML_FN: -# Name of the rocoto workflow XML file that the experiment generation -# script creates and that defines the workflow for the experiment. -# -# GLOBAL_VAR_DEFNS_FN: -# Name of file (a shell script) containing the defintions of the primary -# experiment variables (parameters) defined in this default configuration -# script and in the user-specified configuration as well as secondary -# experiment variables generated by the experiment generation script. -# This file is sourced by many scripts (e.g. the J-job scripts corresponding -# to each workflow task) in order to make all the experiment variables -# available in those scripts. -# -# EXTRN_MDL_VAR_DEFNS_FN: -# Name of file (a shell script) containing the defintions of variables -# associated with the external model from which ICs or LBCs are generated. This -# file is created by the GET_EXTRN_*_TN task because the values of the variables -# it contains are not known before this task runs. The file is then sourced by -# the MAKE_ICS_TN and MAKE_LBCS_TN tasks. -# -# WFLOW_LAUNCH_SCRIPT_FN: -# Name of the script that can be used to (re)launch the experiment's rocoto -# workflow. -# -# WFLOW_LAUNCH_LOG_FN: -# Name of the log file that contains the output from successive calls to -# the workflow launch script (WFLOW_LAUNCH_SCRIPT_FN). 
-# -#----------------------------------------------------------------------- -# -EXPT_CONFIG_FN: "config.sh" - -RGNL_GRID_NML_FN: "regional_grid.nml" - -FV3_NML_BASE_SUITE_FN: "input.nml.FV3" -FV3_NML_YAML_CONFIG_FN: "FV3.input.yml" -FV3_NML_BASE_ENS_FN: "input.nml.base_ens" -FV3_EXEC_FN: "ufs_model" - -DATA_TABLE_TMPL_FN: "" -DIAG_TABLE_TMPL_FN: "" -FIELD_TABLE_TMPL_FN: "" -MODEL_CONFIG_TMPL_FN: "" -NEMS_CONFIG_TMPL_FN: "" - -FCST_MODEL: "ufs-weather-model" -WFLOW_XML_FN: "FV3LAM_wflow.xml" -GLOBAL_VAR_DEFNS_FN: "var_defns.sh" -EXTRN_MDL_VAR_DEFNS_FN: "extrn_mdl_var_defns.sh" -WFLOW_LAUNCH_SCRIPT_FN: "launch_FV3LAM_wflow.sh" -WFLOW_LAUNCH_LOG_FN: "log.launch_FV3LAM_wflow" -# -#----------------------------------------------------------------------- -# -# Set output file name. Definitions: -# -# POST_OUTPUT_DOMAIN_NAME: -# Domain name used in naming the output files of run_post by UPP or inline post. -# Output file name: $NET.tHHz.[var_name].f###.$POST_OUTPUT_DOMAIN_NAME.grib2 -# -#----------------------------------------------------------------------- -# -POST_OUTPUT_DOMAIN_NAME: "" -# -#----------------------------------------------------------------------- -# -# Set forecast parameters. Definitions: -# -# DATE_FIRST_CYCL: -# Starting date of the first forecast in the set of forecasts to run. -# Format is "YYYYMMDD". Note that this does not include the hour-of-day. -# -# DATE_LAST_CYCL: -# Starting date of the last forecast in the set of forecasts to run. -# Format is "YYYYMMDD". Note that this does not include the hour-of-day. -# -# CYCL_HRS: -# An array containing the hours of the day at which to launch forecasts. -# Forecasts are launched at these hours on each day from DATE_FIRST_CYCL -# to DATE_LAST_CYCL, inclusive. Each element of this array must be a -# two-digit string representing an integer that is less than or equal to -# 23, e.g. "00", "03", "12", "23". -# -# INCR_CYCL_FREQ: -# Increment in hours for Cycle Frequency (cycl_freq). 
-# Default is 24, which means cycle_freq=24:00:00 -# -# FCST_LEN_HRS: -# The length of each forecast, in integer hours. -# -#----------------------------------------------------------------------- -# -DATE_FIRST_CYCL: "YYYYMMDD" -DATE_LAST_CYCL: "YYYYMMDD" -CYCL_HRS: [ "HH1", "HH2" ] -INCR_CYCL_FREQ: "24" -FCST_LEN_HRS: "24" -# -#----------------------------------------------------------------------- -# -# Set model_configure parameters. Definitions: -# -# DT_ATMOS: -# The main forecast model integraton time step. As described in the -# forecast model documentation, "It corresponds to the frequency with -# which the top level routine in the dynamics is called as well as the -# frequency with which the physics is called." -# -# CPL: parameter for coupling -# (set automatically based on FCST_MODEL in ush/setup.sh) -# (ufs-weather-model:FALSE, fv3gfs_aqm:TRUE) -# -# RESTART_INTERVAL: -# frequency of the output restart files (unit:hour). -# Default=0: restart files are produced at the end of a forecast run -# For example, RESTART_INTERVAL: "1": restart files are produced every hour -# with the prefix "YYYYMMDD.HHmmSS." in the RESTART directory -# -# WRITE_DOPOST: -# Flag that determines whether or not to use the INLINE POST option -# When TRUE, force to turn off run_post (RUN_TASK_RUN_POST=FALSE) in setup.sh -# -#----------------------------------------------------------------------- -# -DT_ATMOS: "" -RESTART_INTERVAL: "0" -WRITE_DOPOST: "FALSE" -# -#----------------------------------------------------------------------- -# -# Set METplus parameters. Definitions: -# -# MODEL: -# String that specifies a descriptive name for the model being verified. -# -# MET_INSTALL_DIR: -# Location to top-level directory of MET installation. -# -# METPLUS_PATH: -# Location to top-level directory of METplus installation. -# -# CCPA_OBS_DIR: -# User-specified location of top-level directory where CCPA hourly -# precipitation files used by METplus are located. 
This parameter needs -# to be set for both user-provided observations and for observations -# that are retrieved from the NOAA HPSS (if the user has access) via -# the get_obs_ccpa_tn task (activated in workflow by setting -# RUN_TASK_GET_OBS_CCPA="TRUE"). In the case of pulling observations -# directly from NOAA HPSS, the data retrieved will be placed in this -# directory. Please note, this path must be defind as -# /full-path-to-obs/ccpa/proc. METplus is configured to verify 01-, -# 03-, 06-, and 24-h accumulated precipitation using hourly CCPA files. -# METplus configuration files require the use of predetermined directory -# structure and file names. Therefore, if the CCPA files are user -# provided, they need to follow the anticipated naming structure: -# {YYYYMMDD}/ccpa.t{HH}z.01h.hrap.conus.gb2, where YYYY is the 4-digit -# valid year, MM the 2-digit valid month, DD the 2-digit valid day of -# the month, and HH the 2-digit valid hour of the day. In addition, a -# caveat is noted for using hourly CCPA data. There is a problem with -# the valid time in the metadata for files valid from 19 - 00 UTC (or -# files under the '00' directory). The script to pull the CCPA data -# from the NOAA HPSS has an example of how to account for this as well -# as organizing the data into a more intuitive format: -# regional_workflow/scripts/exregional_get_ccpa_files.sh. When a fix -# is provided, it will be accounted for in the -# exregional_get_ccpa_files.sh script. -# -# MRMS_OBS_DIR: -# User-specified location of top-level directory where MRMS composite -# reflectivity files used by METplus are located. This parameter needs -# to be set for both user-provided observations and for observations -# that are retrieved from the NOAA HPSS (if the user has access) via the -# get_obs_mrms_tn task (activated in workflow by setting -# RUN_TASK_GET_OBS_MRMS="TRUE"). In the case of pulling observations -# directly from NOAA HPSS, the data retrieved will be placed in this -# directory. 
Please note, this path must be defind as -# /full-path-to-obs/mrms/proc. METplus configuration files require the -# use of predetermined directory structure and file names. Therefore, if -# the MRMS files are user provided, they need to follow the anticipated -# naming structure: -# {YYYYMMDD}/MergedReflectivityQCComposite_00.50_{YYYYMMDD}-{HH}{mm}{SS}.grib2, -# where YYYY is the 4-digit valid year, MM the 2-digit valid month, DD -# the 2-digit valid day of the month, HH the 2-digit valid hour of the -# day, mm the 2-digit valid minutes of the hour, and SS is the two-digit -# valid seconds of the hour. In addition, METplus is configured to look -# for a MRMS composite reflectivity file for the valid time of the -# forecast being verified; since MRMS composite reflectivity files do -# not always exactly match the valid time, a script, within the main -# script to retrieve MRMS data from the NOAA HPSS, is used to identify -# and rename the MRMS composite reflectivity file to match the valid -# time of the forecast. The script to pull the MRMS data from the NOAA -# HPSS has an example of the expected file naming structure: -# regional_workflow/scripts/exregional_get_mrms_files.sh. This script -# calls the script used to identify the MRMS file closest to the valid -# time: regional_workflow/ush/mrms_pull_topofhour.py. -# -# NDAS_OBS_DIR: -# User-specified location of top-level directory where NDAS prepbufr -# files used by METplus are located. This parameter needs to be set for -# both user-provided observations and for observations that are -# retrieved from the NOAA HPSS (if the user has access) via the -# get_obs_ndas_tn task (activated in workflow by setting  -# RUN_TASK_GET_OBS_NDAS="TRUE"). In the case of pulling observations -# directly from NOAA HPSS, the data retrieved will be placed in this -# directory. Please note, this path must be defind as -# /full-path-to-obs/ndas/proc. 
METplus is configured to verify -# near-surface variables hourly and upper-air variables at times valid -# at 00 and 12 UTC with NDAS prepbufr files. METplus configuration files -# require the use of predetermined file names. Therefore, if the NDAS -# files are user provided, they need to follow the anticipated naming -# structure: prepbufr.ndas.{YYYYMMDDHH}, where YYYY is the 4-digit valid -# year, MM the 2-digit valid month, DD the 2-digit valid day of the -# month, and HH the 2-digit valid hour of the day. The script to pull -# the NDAS data from the NOAA HPSS has an example of how to rename the -# NDAS data into a more intuitive format with the valid time listed in -# the file name: regional_workflow/scripts/exregional_get_ndas_files.sh -# -#----------------------------------------------------------------------- -# -MODEL: "" -MET_INSTALL_DIR: "" -MET_BIN_EXEC: "bin" -METPLUS_PATH: "" -CCPA_OBS_DIR: "" -MRMS_OBS_DIR: "" -NDAS_OBS_DIR: "" -# -#----------------------------------------------------------------------- -# -# Set initial and lateral boundary condition generation parameters. -# Definitions: -# -# EXTRN_MDL_NAME_ICS: -#`The name of the external model that will provide fields from which -# initial condition (including and surface) files will be generated for -# input into the forecast model. -# -# EXTRN_MDL_NAME_LBCS: -#`The name of the external model that will provide fields from which -# lateral boundary condition (LBC) files will be generated for input into -# the forecast model. -# -# LBC_SPEC_INTVL_HRS: -# The interval (in integer hours) with which LBC files will be generated. -# We will refer to this as the boundary update interval. Note that the -# model specified in EXTRN_MDL_NAME_LBCS must have data available at a -# frequency greater than or equal to that implied by LBC_SPEC_INTVL_HRS. -# For example, if LBC_SPEC_INTVL_HRS is set to 6, then the model must have -# data availble at least every 6 hours. 
It is up to the user to ensure -# that this is the case. -# -# EXTRN_MDL_ICS_OFFSET_HRS: -# Users may wish to start a forecast from a forecast of a previous cycle -# of an external model. This variable sets the number of hours earlier -# the external model started than when the FV3 forecast configured here -# should start. For example, the forecast should start from a 6 hour -# forecast of the GFS, then EXTRN_MDL_ICS_OFFSET_HRS=6. - -# EXTRN_MDL_LBCS_OFFSET_HRS: -# Users may wish to use lateral boundary conditions from a forecast that -# was started earlier than the initial time for the FV3 forecast -# configured here. This variable sets the number of hours earlier -# the external model started than when the FV3 forecast configured here -# should start. For example, the forecast should use lateral boundary -# conditions from the GFS started 6 hours earlier, then -# EXTRN_MDL_LBCS_OFFSET_HRS=6. -# Note: the default value is model-dependent and set in -# set_extrn_mdl_params.sh -# -# FV3GFS_FILE_FMT_ICS: -# If using the FV3GFS model as the source of the ICs (i.e. if EXTRN_MDL_NAME_ICS -# is set to "FV3GFS"), this variable specifies the format of the model -# files to use when generating the ICs. -# -# FV3GFS_FILE_FMT_LBCS: -# If using the FV3GFS model as the source of the LBCs (i.e. if -# EXTRN_MDL_NAME_LBCS is set to "FV3GFS"), this variable specifies the -# format of the model files to use when generating the LBCs. -# -#----------------------------------------------------------------------- -# -EXTRN_MDL_NAME_ICS: "FV3GFS" -EXTRN_MDL_NAME_LBCS: "FV3GFS" -LBC_SPEC_INTVL_HRS: "6" -EXTRN_MDL_ICS_OFFSET_HRS: "0" -EXTRN_MDL_LBCS_OFFSET_HRS: "" -FV3GFS_FILE_FMT_ICS: "nemsio" -FV3GFS_FILE_FMT_LBCS: "nemsio" -# -#----------------------------------------------------------------------- -# -# Base directories in which to search for external model files. 
-# -# EXTRN_MDL_SYSBASEDIR_ICS: -# Base directory on the local machine containing external model files for -# generating ICs on the native grid. The way the full path containing -# these files is constructed depends on the user-specified external model -# for ICs, i.e. EXTRN_MDL_NAME_ICS. -# -# EXTRN_MDL_SYSBASEDIR_LBCS: -# Same as EXTRN_MDL_SYSBASEDIR_ICS but for LBCs. -# -# Note that these must be defined as null strings here so that if they -# are specified by the user in the experiment configuration file, they -# remain set to those values, and if not, they get set to machine-dependent -# values. -# -#----------------------------------------------------------------------- -# -EXTRN_MDL_SYSBASEDIR_ICS: '' -EXTRN_MDL_SYSBASEDIR_LBCS: '' -# -#----------------------------------------------------------------------- -# -# User-staged external model directories and files. Definitions: -# -# USE_USER_STAGED_EXTRN_FILES: -# Flag that determines whether or not the workflow will look for the -# external model files needed for generating ICs and LBCs in user-specified -# directories. -# -# EXTRN_MDL_SOURCE_BASEDIR_ICS: -# Directory in which to look for external model files for generating ICs. -# If USE_USER_STAGED_EXTRN_FILES is set to "TRUE", the workflow looks in -# this directory (specifically, in a subdirectory under this directory -# named "YYYYMMDDHH" consisting of the starting date and cycle hour of -# the forecast, where YYYY is the 4-digit year, MM the 2-digit month, DD -# the 2-digit day of the month, and HH the 2-digit hour of the day) for -# the external model files specified by the array EXTRN_MDL_FILES_ICS -# (these files will be used to generate the ICs on the native FV3-LAM -# grid). This variable is not used if USE_USER_STAGED_EXTRN_FILES is -# set to "FALSE". -# -# EXTRN_MDL_FILES_ICS: -# Array containing templates of the names of the files to search for in -# the directory specified by EXTRN_MDL_SOURCE_BASEDIR_ICS. 
This -# variable is not used if USE_USER_STAGED_EXTRN_FILES is set to "FALSE". -# A single template should be used for each model file type that is -# meant to be used. You may use any of the Python-style templates -# allowed in the ush/retrieve_data.py script. To see the full list of -# supported templates, run that script with a -h option. Here is an example of -# setting FV3GFS nemsio input files: -# EXTRN_MDL_FILES_ICS=( gfs.t{hh}z.atmf{fcst_hr:03d}.nemsio \ -# gfs.t{hh}z.sfcf{fcst_hr:03d}.nemsio ) -# Or for FV3GFS grib files: -# EXTRN_MDL_FILES_ICS=( gfs.t{hh}z.pgrb2.0p25.f{fcst_hr:03d} ) -# -# EXTRN_MDL_SOURCE_BASEDIR_LBCS: -# Analogous to EXTRN_MDL_SOURCE_BASEDIR_ICS but for LBCs instead of ICs. -# -# EXTRN_MDL_FILES_LBCS: -# Analogous to EXTRN_MDL_FILES_ICS but for LBCs instead of ICs. -# -# EXTRN_MDL_DATA_STORES: -# A list of data stores where the scripts should look for external model -# data. The list is in priority order. If disk information is provided -# via USE_USER_STAGED_EXTRN_FILES or a known location on the platform, -# the disk location will be highest priority. Options are disk, hpss, -# aws, and nomads. -# -#----------------------------------------------------------------------- -# -USE_USER_STAGED_EXTRN_FILES: "FALSE" -EXTRN_MDL_SOURCE_BASEDIR_ICS: "" -EXTRN_MDL_FILES_ICS: "" -EXTRN_MDL_SOURCE_BASEDIR_LBCS: "" -EXTRN_MDL_FILES_LBCS: "" -EXTRN_MDL_DATA_STORES: "" -# -#----------------------------------------------------------------------- -# -# Set NOMADS online data associated parameters. Definitions: -# -# NOMADS: -# Flag controlling whether or not using NOMADS online data. -# -# NOMADS_file_type: -# Flag controlling the format of data. -# -#----------------------------------------------------------------------- -# -NOMADS: "FALSE" -NOMADS_file_type: "nemsio" -# -#----------------------------------------------------------------------- -# -# Set CCPP-associated parameters. 
Definitions: -# -# CCPP_PHYS_SUITE: -# The physics suite that will run using CCPP (Common Community Physics -# Package). The choice of physics suite determines the forecast model's -# namelist file, the diagnostics table file, the field table file, and -# the XML physics suite definition file that are staged in the experiment -# directory or the cycle directories under it. -# -#----------------------------------------------------------------------- -# -CCPP_PHYS_SUITE: "FV3_GFS_v16" -# -#----------------------------------------------------------------------- -# -# Set GRID_GEN_METHOD. This variable specifies the method to use to -# generate a regional grid in the horizontal. The values that it can -# take on are: -# -# * "GFDLgrid": -# This setting will generate a regional grid by first generating a -# "parent" global cubed-sphere grid and then taking a portion of tile -# 6 of that global grid -- referred to in the grid generation scripts -# as "tile 7" even though it doesn't correspond to a complete tile -- -# and using it as the regional grid. Note that the forecast is run on -# only on the regional grid (i.e. tile 7, not tiles 1 through 6). -# -# * "ESGgrid": -# This will generate a regional grid using the map projection developed -# by Jim Purser of EMC. -# -# Note that: -# -# 1) If the experiment is using one of the predefined grids (i.e. if -# PREDEF_GRID_NAME is set to the name of one of the valid predefined -# grids), then GRID_GEN_METHOD will be reset to the value of -# GRID_GEN_METHOD for that grid. This will happen regardless of -# whether or not GRID_GEN_METHOD is assigned a value in the user- -# specified experiment configuration file, i.e. any value it may be -# assigned in the experiment configuration file will be overwritten. -# -# 2) If the experiment is not using one of the predefined grids (i.e. if -# PREDEF_GRID_NAME is set to a null string), then GRID_GEN_METHOD must -# be set in the experiment configuration file. 
Otherwise, it will -# remain set to a null string, and the experiment generation will -# fail because the generation scripts check to ensure that it is set -# to a non-empty string before creating the experiment directory. -# -#----------------------------------------------------------------------- -# -GRID_GEN_METHOD: "" -# -#----------------------------------------------------------------------- -# -# Set parameters specific to the "GFDLgrid" method of generating a regional -# grid (i.e. for GRID_GEN_METHOD set to "GFDLgrid"). The following -# parameters will be used only if GRID_GEN_METHOD is set to "GFDLgrid". -# In this grid generation method: -# -# * The regional grid is defined with respect to a "parent" global cubed- -# sphere grid. Thus, all the parameters for a global cubed-sphere grid -# must be specified in order to define this parent global grid even -# though the model equations are not integrated on (they are integrated -# only on the regional grid). -# -# * GFDLgrid_RES is the number of grid cells in either one of the two -# horizontal directions x and y on any one of the 6 tiles of the parent -# global cubed-sphere grid. The mapping from GFDLgrid_RES to a nominal -# resolution (grid cell size) for a uniform global grid (i.e. Schmidt -# stretch factor GFDLgrid_STRETCH_FAC set to 1) for several values of -# GFDLgrid_RES is as follows: -# -# GFDLgrid_RES typical cell size -# ------------ ----------------- -# 192 50 km -# 384 25 km -# 768 13 km -# 1152 8.5 km -# 3072 3.2 km -# -# Note that these are only typical cell sizes. The actual cell size on -# the global grid tiles varies somewhat as we move across a tile. -# -# * Tile 6 has arbitrarily been chosen as the tile to use to orient the -# global parent grid on the sphere (Earth). This is done by specifying -# GFDLgrid_LON_T6_CTR and GFDLgrid_LAT_T6_CTR, which are the longitude -# and latitude (in degrees) of the center of tile 6. 
-# -# * Setting the Schmidt stretching factor GFDLgrid_STRETCH_FAC to a value -# greater than 1 shrinks tile 6, while setting it to a value less than -# 1 (but still greater than 0) expands it. The remaining 5 tiles change -# shape as necessary to maintain global coverage of the grid. -# -# * The cell size on a given global tile depends on both GFDLgrid_RES and -# GFDLgrid_STRETCH_FAC (since changing GFDLgrid_RES changes the number -# of cells in the tile, and changing GFDLgrid_STRETCH_FAC modifies the -# shape and size of the tile). -# -# * The regional grid is embedded within tile 6 (i.e. it doesn't extend -# beyond the boundary of tile 6). Its exact location within tile 6 is -# is determined by specifying the starting and ending i and j indices -# of the regional grid on tile 6, where i is the grid index in the x -# direction and j is the grid index in the y direction. These indices -# are stored in the variables -# -# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G -# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G -# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G -# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G -# -# * In the forecast model code and in the experiment generation and workflow -# scripts, for convenience the regional grid is denoted as "tile 7" even -# though it doesn't map back to one of the 6 faces of the cube from -# which the parent global grid is generated (it maps back to only a -# subregion on face 6 since it is wholly confined within tile 6). Tile -# 6 may be referred to as the "parent" tile of the regional grid. -# -# * GFDLgrid_REFINE_RATIO is the refinement ratio of the regional grid -# (tile 7) with respect to the grid on its parent tile (tile 6), i.e. -# it is the number of grid cells along the boundary of the regional grid -# that abut one cell on tile 6. Thus, the cell size on the regional -# grid depends not only on GFDLgrid_RES and GFDLgrid_STRETCH_FAC (because -# the cell size on tile 6 depends on these two parameters) but also on -# GFDLgrid_REFINE_RATIO. 
Note that as on the tiles of the global grid,
-# the cell size on the regional grid is not uniform but varies as we
-# move across the grid.
-#
-# Definitions of parameters that need to be specified when GRID_GEN_METHOD
-# is set to "GFDLgrid":
-#
-# GFDLgrid_LON_T6_CTR:
-# Longitude of the center of tile 6 (in degrees).
-#
-# GFDLgrid_LAT_T6_CTR:
-# Latitude of the center of tile 6 (in degrees).
-#
-# GFDLgrid_RES:
-# Number of points in each of the two horizontal directions (x and y) on
-# each tile of the parent global grid. Note that the name of this parameter
-# is really a misnomer because although it has the string "RES" (for
-# "resolution") in its name, it specifies the number of grid cells, not grid
-# size (in say meters or kilometers). However, we keep this name in order
-# to remain consistent with the usage of the word "resolution" in the
-# global forecast model and other auxiliary codes.
-#
-# GFDLgrid_STRETCH_FAC:
-# Stretching factor used in the Schmidt transformation applied to the
-# parent cubed-sphere grid.
-#
-# GFDLgrid_REFINE_RATIO:
-# Cell refinement ratio for the regional grid, i.e. the number of cells
-# in either the x or y direction on the regional grid (tile 7) that abut
-# one cell on its parent tile (tile 6).
-#
-# GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G:
-# i-index on tile 6 at which the regional grid (tile 7) starts.
-#
-# GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G:
-# i-index on tile 6 at which the regional grid (tile 7) ends.
-#
-# GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G:
-# j-index on tile 6 at which the regional grid (tile 7) starts.
-#
-# GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G:
-# j-index on tile 6 at which the regional grid (tile 7) ends.
-#
-# GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES:
-# Flag that determines the file naming convention to use for grid, orography,
-# and surface climatology files (or, if using pregenerated files, the
-# naming convention that was used to name these files). 
These files -# usually start with the string "C${RES}_", where RES is an integer. -# In the global forecast model, RES is the number of points in each of -# the two horizontal directions (x and y) on each tile of the global grid -# (defined here as GFDLgrid_RES). If this flag is set to "TRUE", RES will -# be set to GFDLgrid_RES just as in the global forecast model. If it is -# set to "FALSE", we calculate (in the grid generation task) an "equivalent -# global uniform cubed-sphere resolution" -- call it RES_EQUIV -- and -# then set RES equal to it. RES_EQUIV is the number of grid points in -# each of the x and y directions on each tile that a global UNIFORM (i.e. -# stretch factor of 1) cubed-sphere grid would have to have in order to -# have the same average grid size as the regional grid. This is a more -# useful indicator of the grid size because it takes into account the -# effects of GFDLgrid_RES, GFDLgrid_STRETCH_FAC, and GFDLgrid_REFINE_RATIO -# in determining the regional grid's typical grid size, whereas simply -# setting RES to GFDLgrid_RES doesn't take into account the effects of -# GFDLgrid_STRETCH_FAC and GFDLgrid_REFINE_RATIO on the regional grid's -# resolution. Nevertheless, some users still prefer to use GFDLgrid_RES -# in the file names, so we allow for that here by setting this flag to -# "TRUE". -# -# Note that: -# -# 1) If the experiment is using one of the predefined grids (i.e. if -# PREDEF_GRID_NAME is set to the name of one of the valid predefined -# grids), then: -# -# a) If the value of GRID_GEN_METHOD for that grid is "GFDLgrid", then -# these parameters will get reset to the values for that grid. -# This will happen regardless of whether or not they are assigned -# values in the user-specified experiment configuration file, i.e. -# any values they may be assigned in the experiment configuration -# file will be overwritten. 
-# -# b) If the value of GRID_GEN_METHOD for that grid is "ESGgrid", then -# these parameters will not be used and thus do not need to be reset -# to non-empty strings. -# -# 2) If the experiment is not using one of the predefined grids (i.e. if -# PREDEF_GRID_NAME is set to a null string), then: -# -# a) If GRID_GEN_METHOD is set to "GFDLgrid" in the user-specified -# experiment configuration file, then these parameters must be set -# in that configuration file. -# -# b) If GRID_GEN_METHOD is set to "ESGgrid" in the user-specified -# experiment configuration file, then these parameters will not be -# used and thus do not need to be reset to non-empty strings. -# -#----------------------------------------------------------------------- -# -GFDLgrid_LON_T6_CTR: "" -GFDLgrid_LAT_T6_CTR: "" -GFDLgrid_RES: "" -GFDLgrid_STRETCH_FAC: "" -GFDLgrid_REFINE_RATIO: "" -GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: "" -GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G: "" -GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: "" -GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: "" -GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES: "" -# -#----------------------------------------------------------------------- -# -# Set parameters specific to the "ESGgrid" method of generating a regional -# grid (i.e. for GRID_GEN_METHOD set to "ESGgrid"). Definitions: -# -# ESGgrid_LON_CTR: -# The longitude of the center of the grid (in degrees). -# -# ESGgrid_LAT_CTR: -# The latitude of the center of the grid (in degrees). -# -# ESGgrid_DELX: -# The cell size in the zonal direction of the regional grid (in meters). -# -# ESGgrid_DELY: -# The cell size in the meridional direction of the regional grid (in -# meters). -# -# ESGgrid_NX: -# The number of cells in the zonal direction on the regional grid. -# -# ESGgrid_NY: -# The number of cells in the meridional direction on the regional grid. 
-# -# ESGgrid_WIDE_HALO_WIDTH: -# The width (in units of number of grid cells) of the halo to add around -# the regional grid before shaving the halo down to the width(s) expected -# by the forecast model. -# -# ESGgrid_PAZI: -# The rotational parameter for the ESG grid (in degrees). -# -# In order to generate grid files containing halos that are 3-cell and -# 4-cell wide and orography files with halos that are 0-cell and 3-cell -# wide (all of which are required as inputs to the forecast model), the -# grid and orography tasks first create files with halos around the regional -# domain of width ESGgrid_WIDE_HALO_WIDTH cells. These are first stored -# in files. The files are then read in and "shaved" down to obtain grid -# files with 3-cell-wide and 4-cell-wide halos and orography files with -# 0-cell-wide (i.e. no halo) and 3-cell-wide halos. For this reason, we -# refer to the original halo that then gets shaved down as the "wide" -# halo, i.e. because it is wider than the 0-cell-wide, 3-cell-wide, and -# 4-cell-wide halos that we will eventually end up with. Note that the -# grid and orography files with the wide halo are only needed as intermediates -# in generating the files with 0-cell-, 3-cell-, and 4-cell-wide halos; -# they are not needed by the forecast model. -# NOTE: Probably don't need to make ESGgrid_WIDE_HALO_WIDTH a user-specified -# variable. Just set it in the function set_gridparams_ESGgrid.sh. -# -# Note that: -# -# 1) If the experiment is using one of the predefined grids (i.e. if -# PREDEF_GRID_NAME is set to the name of one of the valid predefined -# grids), then: -# -# a) If the value of GRID_GEN_METHOD for that grid is "GFDLgrid", then -# these parameters will not be used and thus do not need to be reset -# to non-empty strings. -# -# b) If the value of GRID_GEN_METHOD for that grid is "ESGgrid", then -# these parameters will get reset to the values for that grid. 
-# This will happen regardless of whether or not they are assigned -# values in the user-specified experiment configuration file, i.e. -# any values they may be assigned in the experiment configuration -# file will be overwritten. -# -# 2) If the experiment is not using one of the predefined grids (i.e. if -# PREDEF_GRID_NAME is set to a null string), then: -# -# a) If GRID_GEN_METHOD is set to "GFDLgrid" in the user-specified -# experiment configuration file, then these parameters will not be -# used and thus do not need to be reset to non-empty strings. -# -# b) If GRID_GEN_METHOD is set to "ESGgrid" in the user-specified -# experiment configuration file, then these parameters must be set -# in that configuration file. -# -#----------------------------------------------------------------------- -# -ESGgrid_LON_CTR: "" -ESGgrid_LAT_CTR: "" -ESGgrid_DELX: "" -ESGgrid_DELY: "" -ESGgrid_NX: "" -ESGgrid_NY: "" -ESGgrid_WIDE_HALO_WIDTH: "" -ESGgrid_PAZI: "" -# -#----------------------------------------------------------------------- -# -# Set computational parameters for the forecast. Definitions: -# -# LAYOUT_X, LAYOUT_Y: -# The number of MPI tasks (processes) to use in the two horizontal -# directions (x and y) of the regional grid when running the forecast -# model. -# -# BLOCKSIZE: -# The amount of data that is passed into the cache at a time. -# -# Here, we set these parameters to null strings. This is so that, for -# any one of these parameters: -# -# 1) If the experiment is using a predefined grid, then if the user -# sets the parameter in the user-specified experiment configuration -# file (EXPT_CONFIG_FN), that value will be used in the forecast(s). -# Otherwise, the default value of the parameter for that predefined -# grid will be used. -# -# 2) If the experiment is not using a predefined grid (i.e. 
it is using -# a custom grid whose parameters are specified in the experiment -# configuration file), then the user must specify a value for the -# parameter in that configuration file. Otherwise, the parameter -# will remain set to a null string, and the experiment generation -# will fail because the generation scripts check to ensure that all -# the parameters defined in this section are set to non-empty strings -# before creating the experiment directory. -# -#----------------------------------------------------------------------- -# -LAYOUT_X: "" -LAYOUT_Y: "" -BLOCKSIZE: "" -# -#----------------------------------------------------------------------- -# -# Set write-component (quilting) parameters. Definitions: -# -# QUILTING: -# Flag that determines whether or not to use the write component for -# writing output files to disk. -# -# WRTCMP_write_groups: -# The number of write groups (i.e. groups of MPI tasks) to use in the -# write component. -# -# WRTCMP_write_tasks_per_group: -# The number of MPI tasks to allocate for each write group. -# -# PRINT_ESMF: -# Flag for whether or not to output extra (debugging) information from -# ESMF routines. Must be "TRUE" or "FALSE". Note that the write -# component uses ESMF library routines to interpolate from the native -# forecast model grid to the user-specified output grid (which is defined -# in the model configuration file MODEL_CONFIG_FN in the forecast's run -# directory). -# -#----------------------------------------------------------------------- -# -QUILTING: "TRUE" -PRINT_ESMF: "FALSE" - -WRTCMP_write_groups: "1" -WRTCMP_write_tasks_per_group: "20" - -WRTCMP_output_grid: "''" -WRTCMP_cen_lon: "" -WRTCMP_cen_lat: "" -WRTCMP_lon_lwr_left: "" -WRTCMP_lat_lwr_left: "" -# -# The following are used only for the case of WRTCMP_output_grid set to -# "'rotated_latlon'". 
-# -WRTCMP_lon_upr_rght: "" -WRTCMP_lat_upr_rght: "" -WRTCMP_dlon: "" -WRTCMP_dlat: "" -# -# The following are used only for the case of WRTCMP_output_grid set to -# "'lambert_conformal'". -# -WRTCMP_stdlat1: "" -WRTCMP_stdlat2: "" -WRTCMP_nx: "" -WRTCMP_ny: "" -WRTCMP_dx: "" -WRTCMP_dy: "" -# -#----------------------------------------------------------------------- -# -# Set PREDEF_GRID_NAME. This parameter specifies a predefined regional -# grid, as follows: -# -# * If PREDEF_GRID_NAME is set to a valid predefined grid name, the grid -# generation method GRID_GEN_METHOD, the (native) grid parameters, and -# the write-component grid parameters are set to predefined values for -# the specified grid, overwriting any settings of these parameters in -# the user-specified experiment configuration file. In addition, if -# the time step DT_ATMOS and the computational parameters LAYOUT_X, -# LAYOUT_Y, and BLOCKSIZE are not specified in that configuration file, -# they are also set to predefined values for the specified grid. -# -# * If PREDEF_GRID_NAME is set to an empty string, it implies the user -# is providing the native grid parameters in the user-specified -# experiment configuration file (EXPT_CONFIG_FN). In this case, the -# grid generation method GRID_GEN_METHOD, the native grid parameters, -# and the write-component grid parameters as well as the time step -# forecast model's main time step DT_ATMOS and the computational -# parameters LAYOUT_X, LAYOUT_Y, and BLOCKSIZE must be set in that -# configuration file; otherwise, the values of all of these parameters -# in this default experiment configuration file will be used. -# -# Setting PREDEF_GRID_NAME provides a convenient method of specifying a -# commonly used set of grid-dependent parameters. 
The predefined grid
-# parameters are specified in the script
-#
-# $HOMErrfs/ush/set_predef_grid_params.py
-#
-#-----------------------------------------------------------------------
-#
-PREDEF_GRID_NAME: ""
-#
-#-----------------------------------------------------------------------
-#
-# Set PREEXISTING_DIR_METHOD. This variable determines the method to
-# use to deal with preexisting directories [e.g. ones generated by previous
-# calls to the experiment generation script using the same experiment name
-# (EXPT_SUBDIR) as the current experiment]. This variable must be set to
-# one of "delete", "rename", and "quit". The resulting behavior for each
-# of these values is as follows:
-#
-# * "delete":
-# The preexisting directory is deleted and a new directory (having the
-# same name as the original preexisting directory) is created.
-#
-# * "rename":
-# The preexisting directory is renamed and a new directory (having the
-# same name as the original preexisting directory) is created. The new
-# name of the preexisting directory consists of its original name and
-# the suffix "_oldNNN", where NNN is a 3-digit integer chosen to make
-# the new name unique.
-#
-# * "quit":
-# The preexisting directory is left unchanged, but execution of the
-# currently running script is terminated. In this case, the preexisting
-# directory must be dealt with manually before rerunning the script.
-#
-#-----------------------------------------------------------------------
-#
-PREEXISTING_DIR_METHOD: "delete"
-#
-#-----------------------------------------------------------------------
-#
-# Set flags for more detailed messages. Definitions:
-#
-# VERBOSE:
-# This is a flag that determines whether or not the experiment generation
-# and workflow task scripts tend to print out more informational messages.
-#
-# DEBUG:
-# This is a flag that determines whether or not very detailed debugging
-# messages are printed out. 
Note that if DEBUG is set to TRUE, then -# VERBOSE will also get reset to TRUE if it isn't already. -# -#----------------------------------------------------------------------- -# -VERBOSE: "TRUE" -DEBUG: "FALSE" -# -#----------------------------------------------------------------------- -# -# Set the names of the various rocoto workflow tasks. -# -#----------------------------------------------------------------------- -# -MAKE_GRID_TN: "make_grid" -MAKE_OROG_TN: "make_orog" -MAKE_SFC_CLIMO_TN: "make_sfc_climo" -GET_EXTRN_ICS_TN: "get_extrn_ics" -GET_EXTRN_LBCS_TN: "get_extrn_lbcs" -MAKE_ICS_TN: "make_ics" -MAKE_LBCS_TN: "make_lbcs" -RUN_FCST_TN: "run_fcst" -RUN_POST_TN: "run_post" -GET_OBS: "get_obs" -GET_OBS_CCPA_TN: "get_obs_ccpa" -GET_OBS_MRMS_TN: "get_obs_mrms" -GET_OBS_NDAS_TN: "get_obs_ndas" -VX_TN: "run_vx" -VX_GRIDSTAT_TN: "run_gridstatvx" -VX_GRIDSTAT_REFC_TN: "run_gridstatvx_refc" -VX_GRIDSTAT_RETOP_TN: "run_gridstatvx_retop" -VX_GRIDSTAT_03h_TN: "run_gridstatvx_03h" -VX_GRIDSTAT_06h_TN: "run_gridstatvx_06h" -VX_GRIDSTAT_24h_TN: "run_gridstatvx_24h" -VX_POINTSTAT_TN: "run_pointstatvx" -VX_ENSGRID_TN: "run_ensgridvx" -VX_ENSGRID_03h_TN: "run_ensgridvx_03h" -VX_ENSGRID_06h_TN: "run_ensgridvx_06h" -VX_ENSGRID_24h_TN: "run_ensgridvx_24h" -VX_ENSGRID_REFC_TN: "run_ensgridvx_refc" -VX_ENSGRID_RETOP_TN: "run_ensgridvx_retop" -VX_ENSGRID_MEAN_TN: "run_ensgridvx_mean" -VX_ENSGRID_PROB_TN: "run_ensgridvx_prob" -VX_ENSGRID_MEAN_03h_TN: "run_ensgridvx_mean_03h" -VX_ENSGRID_PROB_03h_TN: "run_ensgridvx_prob_03h" -VX_ENSGRID_MEAN_06h_TN: "run_ensgridvx_mean_06h" -VX_ENSGRID_PROB_06h_TN: "run_ensgridvx_prob_06h" -VX_ENSGRID_MEAN_24h_TN: "run_ensgridvx_mean_24h" -VX_ENSGRID_PROB_24h_TN: "run_ensgridvx_prob_24h" -VX_ENSGRID_PROB_REFC_TN: "run_ensgridvx_prob_refc" -VX_ENSGRID_PROB_RETOP_TN: "run_ensgridvx_prob_retop" -VX_ENSPOINT_TN: "run_enspointvx" -VX_ENSPOINT_MEAN_TN: "run_enspointvx_mean" -VX_ENSPOINT_PROB_TN: "run_enspointvx_prob" -# 
-#----------------------------------------------------------------------- -# -# Set flags (and related directories) that determine whether various -# workflow tasks should be run. Note that the MAKE_GRID_TN, MAKE_OROG_TN, -# and MAKE_SFC_CLIMO_TN are all cycle-independent tasks, i.e. if they -# are to be run, they do so only once at the beginning of the workflow -# before any cycles are run. Definitions: -# -# RUN_TASK_MAKE_GRID: -# Flag that determines whether the MAKE_GRID_TN task is to be run. If -# this is set to "TRUE", the grid generation task is run and new grid -# files are generated. If it is set to "FALSE", then the scripts look -# for pregenerated grid files in the directory specified by GRID_DIR -# (see below). -# -# GRID_DIR: -# The directory in which to look for pregenerated grid files if -# RUN_TASK_MAKE_GRID is set to "FALSE". -# -# RUN_TASK_MAKE_OROG: -# Same as RUN_TASK_MAKE_GRID but for the MAKE_OROG_TN task. -# -# OROG_DIR: -# Same as GRID_DIR but for the MAKE_OROG_TN task. -# -# RUN_TASK_MAKE_SFC_CLIMO: -# Same as RUN_TASK_MAKE_GRID but for the MAKE_SFC_CLIMO_TN task. -# -# SFC_CLIMO_DIR: -# Same as GRID_DIR but for the MAKE_SFC_CLIMO_TN task. -# -# DOMAIN_PREGEN_BASEDIR: -# The base directory containing pregenerated grid, orography, and surface -# climatology files. This is an alternative for setting GRID_DIR, -# OROG_DIR, and SFC_CLIMO_DIR individually -# -# For the pregenerated grid specified by PREDEF_GRID_NAME, -# these "fixed" files are located in: -# -# ${DOMAIN_PREGEN_BASEDIR}/${PREDEF_GRID_NAME} -# -# The workflow scripts will create a symlink in the experiment directory -# that will point to a subdirectory (having the name of the grid being -# used) under this directory. This variable should be set to a null -# string in this file, but it can be specified in the user-specified -# workflow configuration file (EXPT_CONFIG_FN). -# -# RUN_TASK_GET_EXTRN_ICS: -# Flag that determines whether the GET_EXTRN_ICS_TN task is to be run. 
-# -# RUN_TASK_GET_EXTRN_LBCS: -# Flag that determines whether the GET_EXTRN_LBCS_TN task is to be run. -# -# RUN_TASK_MAKE_ICS: -# Flag that determines whether the MAKE_ICS_TN task is to be run. -# -# RUN_TASK_MAKE_LBCS: -# Flag that determines whether the MAKE_LBCS_TN task is to be run. -# -# RUN_TASK_RUN_FCST: -# Flag that determines whether the RUN_FCST_TN task is to be run. -# -# RUN_TASK_RUN_POST: -# Flag that determines whether the RUN_POST_TN task is to be run. -# -# RUN_TASK_VX_GRIDSTAT: -# Flag that determines whether the grid-stat verification task is to be -# run. -# -# RUN_TASK_VX_POINTSTAT: -# Flag that determines whether the point-stat verification task is to be -# run. -# -# RUN_TASK_VX_ENSGRID: -# Flag that determines whether the ensemble-stat verification for gridded -# data task is to be run. -# -# RUN_TASK_VX_ENSPOINT: -# Flag that determines whether the ensemble point verification task is -# to be run. If this flag is set, both ensemble-stat point verification -# and point verification of ensemble-stat output is computed. 
-# -#----------------------------------------------------------------------- -# -RUN_TASK_MAKE_GRID: "TRUE" -GRID_DIR: "/path/to/pregenerated/grid/files" - -RUN_TASK_MAKE_OROG: "TRUE" -OROG_DIR: "/path/to/pregenerated/orog/files" - -RUN_TASK_MAKE_SFC_CLIMO: "TRUE" -SFC_CLIMO_DIR: "/path/to/pregenerated/surface/climo/files" - -DOMAIN_PREGEN_BASEDIR: "" - -RUN_TASK_GET_EXTRN_ICS: "TRUE" -RUN_TASK_GET_EXTRN_LBCS: "TRUE" -RUN_TASK_MAKE_ICS: "TRUE" -RUN_TASK_MAKE_LBCS: "TRUE" -RUN_TASK_RUN_FCST: "TRUE" -RUN_TASK_RUN_POST: "TRUE" - -RUN_TASK_GET_OBS_CCPA: "FALSE" -RUN_TASK_GET_OBS_MRMS: "FALSE" -RUN_TASK_GET_OBS_NDAS: "FALSE" -RUN_TASK_VX_GRIDSTAT: "FALSE" -RUN_TASK_VX_POINTSTAT: "FALSE" -RUN_TASK_VX_ENSGRID: "FALSE" -RUN_TASK_VX_ENSPOINT: "FALSE" -# -#----------------------------------------------------------------------- -# -# Flag that determines whether MERRA2 aerosol climatology data and -# lookup tables for optics properties are obtained -# -#----------------------------------------------------------------------- -# -USE_MERRA_CLIMO: "FALSE" -# -#----------------------------------------------------------------------- -# -# Set the array parameter containing the names of all the fields that the -# MAKE_SFC_CLIMO_TN task generates on the native FV3-LAM grid. -# -#----------------------------------------------------------------------- -# -SFC_CLIMO_FIELDS: [ -"facsf", -"maximum_snow_albedo", -"slope_type", -"snowfree_albedo", -"soil_type", -"substrate_temperature", -"vegetation_greenness", -"vegetation_type" -] -# -#----------------------------------------------------------------------- -# -# Set parameters associated with the fixed (i.e. static) files. Definitions: -# -# FIXgsm: -# System directory in which the majority of fixed (i.e. 
time-independent)
-# files that are needed to run the FV3-LAM model are located
-#
-# FIXaer:
-# System directory where MERRA2 aerosol climatology files are located
-#
-# FIXlut:
-# System directory where the lookup tables for optics properties are located
-#
-# TOPO_DIR:
-# The location on disk of the static input files used by the make_orog
-# task (orog.x and shave.x). Can be the same as FIXgsm.
-#
-# SFC_CLIMO_INPUT_DIR:
-# The location on disk of the static surface climatology input fields, used by
-# sfc_climo_gen. These files are only used if RUN_TASK_MAKE_SFC_CLIMO=TRUE
-#
-# FNGLAC, ..., FNMSKH:
-# Names of (some of the) global data files that are assumed to exist in
-# a system directory specified (this directory is machine-dependent;
-# the experiment generation scripts will set it and store it in the
-# variable FIXgsm). These file names also appear directly in the forecast
-# model's input namelist file.
-#
-# FIXgsm_FILES_TO_COPY_TO_FIXam:
-# If not running in NCO mode, this array contains the names of the files
-# to copy from the FIXgsm system directory to the FIXam directory under
-# the experiment directory. Note that the last element has a dummy value.
-# This last element will get reset by the workflow generation scripts to
-# the name of the ozone production/loss file to copy from FIXgsm. The
-# name of this file depends on the ozone parameterization being used,
-# and that in turn depends on the CCPP physics suite specified for the
-# experiment. Thus, the CCPP physics suite XML must first be read in to
-# determine the ozone parameterization and then the name of the ozone
-# production/loss file. These steps are carried out elsewhere (in one
-# of the workflow generation scripts/functions). 
-# -# FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING: -# This array is used to set some of the namelist variables in the forecast -# model's namelist file that represent the relative or absolute paths of -# various fixed files (the first column of the array, where columns are -# delineated by the pipe symbol "|") to the full paths to these files in -# the FIXam directory derived from the corresponding workflow variables -# containing file names (the second column of the array). -# -# FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: -# This array is used to set some of the namelist variables in the forecast -# model's namelist file that represent the relative or absolute paths of -# various fixed files (the first column of the array, where columns are -# delineated by the pipe symbol "|") to the full paths to surface climatology -# files (on the native FV3-LAM grid) in the FIXLAM directory derived from -# the corresponding surface climatology fields (the second column of the -# array). -# -# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING: -# This array specifies the mapping to use between the symlinks that need -# to be created in each cycle directory (these are the "files" that FV3 -# looks for) and their targets in the FIXam directory. The first column -# of the array specifies the symlink to be created, and the second column -# specifies its target file in FIXam (where columns are delineated by the -# pipe symbol "|"). 
-# -#----------------------------------------------------------------------- -# -# Because the default values are dependent on the platform, we set these -# to a null string which will then be overwritten in setup.sh unless the -# user has specified a different value in config.sh -FIXgsm: "" -FIXaer: "" -FIXlut: "" -TOPO_DIR: "" -SFC_CLIMO_INPUT_DIR: "" - -FNGLAC: &FNGLAC "global_glacier.2x2.grb" -FNMXIC: &FNMXIC "global_maxice.2x2.grb" -FNTSFC: &FNTSFC "RTGSST.1982.2012.monthly.clim.grb" -FNSNOC: &FNSNOC "global_snoclim.1.875.grb" -FNZORC: &FNZORC "igbp" -FNAISC: &FNAISC "CFSR.SEAICE.1982.2012.monthly.clim.grb" -FNSMCC: &FNSMCC "global_soilmgldas.t126.384.190.grb" -FNMSKH: &FNMSKH "seaice_newland.grb" - -FIXgsm_FILES_TO_COPY_TO_FIXam: [ -*FNGLAC, -*FNMXIC, -*FNTSFC, -*FNSNOC, -*FNAISC, -*FNSMCC, -*FNMSKH, -"global_climaeropac_global.txt", -"fix_co2_proj/global_co2historicaldata_2010.txt", -"fix_co2_proj/global_co2historicaldata_2011.txt", -"fix_co2_proj/global_co2historicaldata_2012.txt", -"fix_co2_proj/global_co2historicaldata_2013.txt", -"fix_co2_proj/global_co2historicaldata_2014.txt", -"fix_co2_proj/global_co2historicaldata_2015.txt", -"fix_co2_proj/global_co2historicaldata_2016.txt", -"fix_co2_proj/global_co2historicaldata_2017.txt", -"fix_co2_proj/global_co2historicaldata_2018.txt", -"fix_co2_proj/global_co2historicaldata_2019.txt", -"fix_co2_proj/global_co2historicaldata_2020.txt", -"fix_co2_proj/global_co2historicaldata_2021.txt", -"global_co2historicaldata_glob.txt", -"co2monthlycyc.txt", -"global_h2o_pltc.f77", -"global_hyblev.l65.txt", -"global_zorclim.1x1.grb", -"global_sfc_emissivity_idx.txt", -"global_tg3clim.2.6x1.5.grb", -"global_solarconstant_noaa_an.txt", -"global_albedo4.1x1.grb", -"geo_em.d01.lat-lon.2.5m.HGT_M.nc", -"HGT.Beljaars_filtered.lat-lon.30s_res.nc", -"replace_with_FIXgsm_ozone_prodloss_filename" -] - -# -# It is possible to remove this as a workflow variable and make it only -# a local one since it is used in only one script. 
-# -FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING: [ -!join_str ["FNGLAC | ",*FNGLAC], -!join_str ["FNMXIC | ",*FNMXIC], -!join_str ["FNTSFC | ",*FNTSFC], -!join_str ["FNSNOC | ",*FNSNOC], -!join_str ["FNAISC | ",*FNAISC], -!join_str ["FNSMCC | ",*FNSMCC], -!join_str ["FNMSKH | ",*FNMSKH] -] -#"FNZORC | $FNZORC", - -FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING: [ -"FNALBC | snowfree_albedo", -"FNALBC2 | facsf", -"FNTG3C | substrate_temperature", -"FNVEGC | vegetation_greenness", -"FNVETC | vegetation_type", -"FNSOTC | soil_type", -"FNVMNC | vegetation_greenness", -"FNVMXC | vegetation_greenness", -"FNSLPC | slope_type", -"FNABSC | maximum_snow_albedo" -] - -CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING: [ -"aerosol.dat | global_climaeropac_global.txt", -"co2historicaldata_2010.txt | fix_co2_proj/global_co2historicaldata_2010.txt", -"co2historicaldata_2011.txt | fix_co2_proj/global_co2historicaldata_2011.txt", -"co2historicaldata_2012.txt | fix_co2_proj/global_co2historicaldata_2012.txt", -"co2historicaldata_2013.txt | fix_co2_proj/global_co2historicaldata_2013.txt", -"co2historicaldata_2014.txt | fix_co2_proj/global_co2historicaldata_2014.txt", -"co2historicaldata_2015.txt | fix_co2_proj/global_co2historicaldata_2015.txt", -"co2historicaldata_2016.txt | fix_co2_proj/global_co2historicaldata_2016.txt", -"co2historicaldata_2017.txt | fix_co2_proj/global_co2historicaldata_2017.txt", -"co2historicaldata_2018.txt | fix_co2_proj/global_co2historicaldata_2018.txt", -"co2historicaldata_2019.txt | fix_co2_proj/global_co2historicaldata_2019.txt", -"co2historicaldata_2020.txt | fix_co2_proj/global_co2historicaldata_2020.txt", -"co2historicaldata_2021.txt | fix_co2_proj/global_co2historicaldata_2021.txt", -"co2historicaldata_glob.txt | global_co2historicaldata_glob.txt", -"co2monthlycyc.txt | co2monthlycyc.txt", -"global_h2oprdlos.f77 | global_h2o_pltc.f77", -"global_albedo4.1x1.grb | global_albedo4.1x1.grb", -"global_zorclim.1x1.grb | global_zorclim.1x1.grb", -"global_tg3clim.2.6x1.5.grb | 
global_tg3clim.2.6x1.5.grb", -"sfc_emissivity_idx.txt | global_sfc_emissivity_idx.txt", -"solarconstant_noaa_an.txt | global_solarconstant_noaa_an.txt", -"global_o3prdlos.f77 | " -] -# -#----------------------------------------------------------------------- -# -# For each workflow task, set the parameters to pass to the job scheduler -# (e.g. slurm) that will submit a job for each task to be run. These -# parameters include the number of nodes to use to run the job, the MPI -# processes per node, the maximum walltime to allow for the job to complete, -# and the maximum number of times to attempt to run each task. -# -#----------------------------------------------------------------------- -# -# Number of nodes. -# -NNODES_MAKE_GRID: "1" -NNODES_MAKE_OROG: "1" -NNODES_MAKE_SFC_CLIMO: "2" -NNODES_GET_EXTRN_ICS: "1" -NNODES_GET_EXTRN_LBCS: "1" -NNODES_MAKE_ICS: "4" -NNODES_MAKE_LBCS: "4" -NNODES_RUN_FCST: "" # This is calculated in the workflow generation scripts, so no need to set here. -NNODES_RUN_POST: "2" -NNODES_GET_OBS_CCPA: "1" -NNODES_GET_OBS_MRMS: "1" -NNODES_GET_OBS_NDAS: "1" -NNODES_VX_GRIDSTAT: "1" -NNODES_VX_POINTSTAT: "1" -NNODES_VX_ENSGRID: "1" -NNODES_VX_ENSGRID_MEAN: "1" -NNODES_VX_ENSGRID_PROB: "1" -NNODES_VX_ENSPOINT: "1" -NNODES_VX_ENSPOINT_MEAN: "1" -NNODES_VX_ENSPOINT_PROB: "1" -# -# Number of MPI processes per node. -# -PPN_MAKE_GRID: "24" -PPN_MAKE_OROG: "24" -PPN_MAKE_SFC_CLIMO: "24" -PPN_GET_EXTRN_ICS: "1" -PPN_GET_EXTRN_LBCS: "1" -PPN_MAKE_ICS: "12" -PPN_MAKE_LBCS: "12" -PPN_RUN_FCST: "" # will be calculated from NCORES_PER_NODE and OMP_NUM_THREADS in setup.sh -PPN_RUN_POST: "24" -PPN_GET_OBS_CCPA: "1" -PPN_GET_OBS_MRMS: "1" -PPN_GET_OBS_NDAS: "1" -PPN_VX_GRIDSTAT: "1" -PPN_VX_POINTSTAT: "1" -PPN_VX_ENSGRID: "1" -PPN_VX_ENSGRID_MEAN: "1" -PPN_VX_ENSGRID_PROB: "1" -PPN_VX_ENSPOINT: "1" -PPN_VX_ENSPOINT_MEAN: "1" -PPN_VX_ENSPOINT_PROB: "1" -# -# Walltimes. 
-# -WTIME_MAKE_GRID: "00:20:00" -WTIME_MAKE_OROG: "01:00:00" -WTIME_MAKE_SFC_CLIMO: "00:20:00" -WTIME_GET_EXTRN_ICS: "00:45:00" -WTIME_GET_EXTRN_LBCS: "00:45:00" -WTIME_MAKE_ICS: "00:30:00" -WTIME_MAKE_LBCS: "00:30:00" -WTIME_RUN_FCST: "04:30:00" -WTIME_RUN_POST: "00:15:00" -WTIME_GET_OBS_CCPA: "00:45:00" -WTIME_GET_OBS_MRMS: "00:45:00" -WTIME_GET_OBS_NDAS: "02:00:00" -WTIME_VX_GRIDSTAT: "02:00:00" -WTIME_VX_POINTSTAT: "01:00:00" -WTIME_VX_ENSGRID: "01:00:00" -WTIME_VX_ENSGRID_MEAN: "01:00:00" -WTIME_VX_ENSGRID_PROB: "01:00:00" -WTIME_VX_ENSPOINT: "01:00:00" -WTIME_VX_ENSPOINT_MEAN: "01:00:00" -WTIME_VX_ENSPOINT_PROB: "01:00:00" -# -# Maximum number of attempts. -# -MAXTRIES_MAKE_GRID: "2" -MAXTRIES_MAKE_OROG: "2" -MAXTRIES_MAKE_SFC_CLIMO: "2" -MAXTRIES_GET_EXTRN_ICS: "1" -MAXTRIES_GET_EXTRN_LBCS: "1" -MAXTRIES_MAKE_ICS: "1" -MAXTRIES_MAKE_LBCS: "1" -MAXTRIES_RUN_FCST: "1" -MAXTRIES_RUN_POST: "2" -MAXTRIES_GET_OBS_CCPA: "1" -MAXTRIES_GET_OBS_MRMS: "1" -MAXTRIES_GET_OBS_NDAS: "1" -MAXTRIES_VX_GRIDSTAT: "1" -MAXTRIES_VX_GRIDSTAT_REFC: "1" -MAXTRIES_VX_GRIDSTAT_RETOP: "1" -MAXTRIES_VX_GRIDSTAT_03h: "1" -MAXTRIES_VX_GRIDSTAT_06h: "1" -MAXTRIES_VX_GRIDSTAT_24h: "1" -MAXTRIES_VX_POINTSTAT: "1" -MAXTRIES_VX_ENSGRID: "1" -MAXTRIES_VX_ENSGRID_REFC: "1" -MAXTRIES_VX_ENSGRID_RETOP: "1" -MAXTRIES_VX_ENSGRID_03h: "1" -MAXTRIES_VX_ENSGRID_06h: "1" -MAXTRIES_VX_ENSGRID_24h: "1" -MAXTRIES_VX_ENSGRID_MEAN: "1" -MAXTRIES_VX_ENSGRID_PROB: "1" -MAXTRIES_VX_ENSGRID_MEAN_03h: "1" -MAXTRIES_VX_ENSGRID_PROB_03h: "1" -MAXTRIES_VX_ENSGRID_MEAN_06h: "1" -MAXTRIES_VX_ENSGRID_PROB_06h: "1" -MAXTRIES_VX_ENSGRID_MEAN_24h: "1" -MAXTRIES_VX_ENSGRID_PROB_24h: "1" -MAXTRIES_VX_ENSGRID_PROB_REFC: "1" -MAXTRIES_VX_ENSGRID_PROB_RETOP: "1" -MAXTRIES_VX_ENSPOINT: "1" -MAXTRIES_VX_ENSPOINT_MEAN: "1" -MAXTRIES_VX_ENSPOINT_PROB: "1" - -# -#----------------------------------------------------------------------- -# -# Allows an extra parameter to be passed to slurm via XML Native -# command -# 
-SLURM_NATIVE_CMD: "" -# -#----------------------------------------------------------------------- -# -# Set parameters associated with subhourly forecast model output and -# post-processing. -# -# SUB_HOURLY_POST: -# Flag that indicates whether the forecast model will generate output -# files on a sub-hourly time interval (e.g. 10 minutes, 15 minutes, etc). -# This will also cause the post-processor to process these sub-hourly -# files. If ths is set to "TRUE", then DT_SUBHOURLY_POST_MNTS should be -# set to a value between "00" and "59". -# -# DT_SUB_HOURLY_POST_MNTS: -# Time interval in minutes between the forecast model output files. If -# SUB_HOURLY_POST is set to "TRUE", this needs to be set to a two-digit -# integer between "01" and "59". This is not used if SUB_HOURLY_POST is -# not set to "TRUE". Note that if SUB_HOURLY_POST is set to "TRUE" but -# DT_SUB_HOURLY_POST_MNTS is set to "00", SUB_HOURLY_POST will get reset -# to "FALSE" in the experiment generation scripts (there will be an -# informational message in the log file to emphasize this). -# -#----------------------------------------------------------------------- -# -SUB_HOURLY_POST: "FALSE" -DT_SUBHOURLY_POST_MNTS: "00" -# -#----------------------------------------------------------------------- -# -# Set parameters associated with defining a customized post configuration -# file. -# -# USE_CUSTOM_POST_CONFIG_FILE: -# Flag that determines whether a user-provided custom configuration file -# should be used for post-processing the model data. If this is set to -# "TRUE", then the workflow will use the custom post-processing (UPP) -# configuration file specified in CUSTOM_POST_CONFIG_FP. Otherwise, a -# default configuration file provided in the UPP repository will be -# used. -# -# CUSTOM_POST_CONFIG_FP: -# The full path to the custom post flat file, including filename, to be -# used for post-processing. This is only used if CUSTOM_POST_CONFIG_FILE -# is set to "TRUE". 
-# -#----------------------------------------------------------------------- -# -USE_CUSTOM_POST_CONFIG_FILE: "FALSE" -CUSTOM_POST_CONFIG_FP: "" -# -#----------------------------------------------------------------------- -# -# Set parameters associated with outputting satellite fields in the UPP -# grib2 files using the Community Radiative Transfer Model (CRTM). -# -# USE_CRTM: -# Flag that defines whether external CRTM coefficient files have been -# staged by the user in order to output synthetic statellite products -# available within the UPP. If this is set to "TRUE", then the workflow -# will check for these files in the directory CRTM_DIR. Otherwise, it is -# assumed that no satellite fields are being requested in the UPP -# configuration. -# -# CRTM_DIR: -# This is the path to the top CRTM fix file directory. This is only used -# if USE_CRTM is set to "TRUE". -# -#----------------------------------------------------------------------- -# -USE_CRTM: "FALSE" -CRTM_DIR: "" -# -#----------------------------------------------------------------------- -# -# Set parameters associated with running ensembles. Definitions: -# -# DO_ENSEMBLE: -# Flag that determines whether to run a set of ensemble forecasts (for -# each set of specified cycles). If this is set to "TRUE", NUM_ENS_MEMBERS -# forecasts are run for each cycle, each with a different set of stochastic -# seed values. Otherwise, a single forecast is run for each cycle. -# -# NUM_ENS_MEMBERS: -# The number of ensemble members to run if DO_ENSEMBLE is set to "TRUE". -# This variable also controls the naming of the ensemble member directories. -# For example, if this is set to "8", the member directories will be named -# mem1, mem2, ..., mem8. If it is set to "08" (note the leading zero), -# the member directories will be named mem01, mem02, ..., mem08. 
Note, -# however, that after reading in the number of characters in this string -# (in order to determine how many leading zeros, if any, should be placed -# in the names of the member directories), the workflow generation scripts -# strip away those leading zeros. Thus, in the variable definitions file -# (GLOBAL_VAR_DEFNS_FN), this variable appear with its leading zeros -# stripped. This variable is not used if DO_ENSEMBLE is not set to "TRUE". -# -#----------------------------------------------------------------------- -# -DO_ENSEMBLE: "FALSE" -NUM_ENS_MEMBERS: "1" -# -#----------------------------------------------------------------------- -# -# Set default ad-hoc stochastic physics options. -# For detailed documentation of these parameters, see: -# https://stochastic-physics.readthedocs.io/en/ufs_public_release/namelist_options.html -# -#----------------------------------------------------------------------- -# -DO_SHUM: "FALSE" -DO_SPPT: "FALSE" -DO_SKEB: "FALSE" -ISEED_SPPT: "1" -ISEED_SHUM: "2" -ISEED_SKEB: "3" -NEW_LSCALE: "TRUE" -SHUM_MAG: "0.006" #Variable "shum" in input.nml -SHUM_LSCALE: "150000" -SHUM_TSCALE: "21600" #Variable "shum_tau" in input.nml -SHUM_INT: "3600" #Variable "shumint" in input.nml -SPPT_MAG: "0.7" #Variable "sppt" in input.nml -SPPT_LOGIT: "TRUE" -SPPT_LSCALE: "150000" -SPPT_TSCALE: "21600" #Variable "sppt_tau" in input.nml -SPPT_INT: "3600" #Variable "spptint" in input.nml -SPPT_SFCLIMIT: "TRUE" -SKEB_MAG: "0.5" #Variable "skeb" in input.nml -SKEB_LSCALE: "150000" -SKEB_TSCALE: "21600" #Variable "skeb_tau" in input.nml -SKEB_INT: "3600" #Variable "skebint" in input.nml -SKEBNORM: "1" -SKEB_VDOF: "10" -USE_ZMTNBLCK: "FALSE" -# -#----------------------------------------------------------------------- -# -# Set default SPP stochastic physics options. Each SPP option is an array, -# applicable (in order) to the scheme/parameter listed in SPP_VAR_LIST. 
-# Enter each value of the array in config.sh as shown below without commas -# or single quotes (e.g., SPP_VAR_LIST=( "pbl" "sfc" "mp" "rad" "gwd" ). -# Both commas and single quotes will be added by Jinja when creating the -# namelist. -# -# Note that SPP is currently only available for specific physics schemes -# used in the RAP/HRRR physics suite. Users need to be aware of which SDF -# is chosen when turning this option on. -# -# Patterns evolve and are applied at each time step. -# -#----------------------------------------------------------------------- -# -DO_SPP: "FALSE" -SPP_VAR_LIST: [ "pbl", "sfc", "mp", "rad", "gwd" ] -SPP_MAG_LIST: [ "0.2", "0.2", "0.75", "0.2", "0.2" ] #Variable "spp_prt_list" in input.nml -SPP_LSCALE: [ "150000.0", "150000.0", "150000.0", "150000.0", "150000.0" ] -SPP_TSCALE: [ "21600.0", "21600.0", "21600.0", "21600.0", "21600.0" ] #Variable "spp_tau" in input.nml -SPP_SIGTOP1: [ "0.1", "0.1", "0.1", "0.1", "0.1" ] -SPP_SIGTOP2: [ "0.025", "0.025", "0.025", "0.025", "0.025" ] -SPP_STDDEV_CUTOFF: [ "1.5", "1.5", "2.5", "1.5", "1.5" ] -ISEED_SPP: [ "4", "4", "4", "4", "4" ] -# -#----------------------------------------------------------------------- -# -# Turn on SPP in Noah or RUC LSM (support for Noah MP is in progress). -# Please be aware of the SDF that you choose if you wish to turn on LSM -# SPP. -# -# SPP in LSM schemes is handled in the &nam_sfcperts namelist block -# instead of in &nam_sppperts, where all other SPP is implemented. -# -# Perturbations to soil moisture content (SMC) are only applied at the -# first time step. -# -# LSM perturbations include SMC - soil moisture content (volume fraction), -# VGF - vegetation fraction, ALB - albedo, SAL - salinity, -# EMI - emissivity, ZOL - surface roughness (cm), and STC - soil temperature. -# -# Only five perturbations at a time can be applied currently, but all seven -# are shown below. 
In addition, only one unique iseed value is allowed -# at the moment, and is used for each pattern. -# -DO_LSM_SPP: "FALSE" #If true, sets lndp_type=2, lndp_model_type=2 -LSM_SPP_TSCALE: [ "21600", "21600", "21600", "21600", "21600", "21600", "21600" ] -LSM_SPP_LSCALE: [ "150000", "150000", "150000", "150000", "150000", "150000", "150000" ] -ISEED_LSM_SPP: [ "9" ] -LSM_SPP_VAR_LIST: [ "smc", "vgf", "alb", "sal", "emi", "zol", "stc" ] -LSM_SPP_MAG_LIST: [ "0.017", "0.001", "0.001", "0.001", "0.001", "0.001", "0.2" ] -# -#----------------------------------------------------------------------- -# -# HALO_BLEND: -# Number of rows into the computational domain that should be blended -# with the LBCs. To shut halo blending off, this can be set to zero. -# -#----------------------------------------------------------------------- -# -HALO_BLEND: "10" -# -#----------------------------------------------------------------------- -# -# USE_FVCOM: -# Flag set to update surface conditions in FV3-LAM with fields generated -# from the Finite Volume Community Ocean Model (FVCOM). This will -# replace lake/sea surface temperature, ice surface temperature, and ice -# placement. FVCOM data must already be interpolated to the desired -# FV3-LAM grid. This flag will be used in make_ics to modify sfc_data.nc -# after chgres_cube is run by running the routine process_FVCOM.exe -# -# FVCOM_WCSTART: -# Define if this is a "warm" start or a "cold" start. Setting this to -# "warm" will read in sfc_data.nc generated in a RESTART directory. -# Setting this to "cold" will read in the sfc_data.nc generated from -# chgres_cube in the make_ics portion of the workflow. -# -# FVCOM_DIR: -# User defined directory where FVCOM data already interpolated to FV3-LAM -# grid is located. File name in this path should be "fvcom.nc" to allow -# -# FVCOM_FILE: -# Name of file located in FVCOM_DIR that has FVCOM data interpolated to -# FV3-LAM grid. 
This file will be copied later to a new location and name -# changed to fvcom.nc -# -#------------------------------------------------------------------------ -# -USE_FVCOM: "FALSE" -FVCOM_WCSTART: "cold" -FVCOM_DIR: "/user/defined/dir/to/fvcom/data" -FVCOM_FILE: "fvcom.nc" -# -#----------------------------------------------------------------------- -# -# COMPILER: -# Type of compiler invoked during the build step. -# -#------------------------------------------------------------------------ -# -COMPILER: "intel" -# -#----------------------------------------------------------------------- -# -# KMP_AFFINITY_*: -# From Intel: "The Intel® runtime library has the ability to bind OpenMP -# threads to physical processing units. The interface is controlled using -# the KMP_AFFINITY environment variable. Depending on the system (machine) -# topology, application, and operating system, thread affinity can have a -# dramatic effect on the application speed. -# -# Thread affinity restricts execution of certain threads (virtual execution -# units) to a subset of the physical processing units in a multiprocessor -# computer. Depending upon the topology of the machine, thread affinity can -# have a dramatic effect on the execution speed of a program." -# -# For more information, see the following link: -# https://software.intel.com/content/www/us/en/develop/documentation/cpp- -# compiler-developer-guide-and-reference/top/optimization-and-programming- -# guide/openmp-support/openmp-library-support/thread-affinity-interface- -# linux-and-windows.html -# -# OMP_NUM_THREADS_*: -# The number of OpenMP threads to use for parallel regions. -# -# OMP_STACKSIZE_*: -# Controls the size of the stack for threads created by the OpenMP -# implementation. -# -# Note that settings for the make_grid and make_orog tasks are not -# included below as they do not use parallelized code. 
-# -#----------------------------------------------------------------------- -# -KMP_AFFINITY_MAKE_OROG: "disabled" -OMP_NUM_THREADS_MAKE_OROG: "6" -OMP_STACKSIZE_MAKE_OROG: "2048m" - -KMP_AFFINITY_MAKE_SFC_CLIMO: "scatter" -OMP_NUM_THREADS_MAKE_SFC_CLIMO: "1" -OMP_STACKSIZE_MAKE_SFC_CLIMO: "1024m" - -KMP_AFFINITY_MAKE_ICS: "scatter" -OMP_NUM_THREADS_MAKE_ICS: "1" -OMP_STACKSIZE_MAKE_ICS: "1024m" - -KMP_AFFINITY_MAKE_LBCS: "scatter" -OMP_NUM_THREADS_MAKE_LBCS: "1" -OMP_STACKSIZE_MAKE_LBCS: "1024m" - -KMP_AFFINITY_RUN_FCST: "scatter" -OMP_NUM_THREADS_RUN_FCST: "2" # atmos_nthreads in model_configure -OMP_STACKSIZE_RUN_FCST: "1024m" - -KMP_AFFINITY_RUN_POST: "scatter" -OMP_NUM_THREADS_RUN_POST: "1" -OMP_STACKSIZE_RUN_POST: "1024m" -# -#----------------------------------------------------------------------- -# diff --git a/ush/constants.py b/ush/constants.py deleted file mode 100644 index e0ff60a0b0..0000000000 --- a/ush/constants.py +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/env/bin python3 - -# -#----------------------------------------------------------------------- -# -# Mathematical and physical constants. -# -#----------------------------------------------------------------------- -# - -# Pi. -pi_geom = 3.14159265358979323846264338327 - -# Degrees per radian. -degs_per_radian = 360.0 / (2.0 * pi_geom) - -# Radius of the Earth in meters. -radius_Earth = 6371200.0 - -# -#----------------------------------------------------------------------- -# -# Other. -# -#----------------------------------------------------------------------- -# -valid_vals_BOOLEAN = [True, False] diff --git a/ush/constants.sh b/ush/constants.sh index 26f4cb1b32..f5bce884e8 100644 --- a/ush/constants.sh +++ b/ush/constants.sh @@ -7,13 +7,13 @@ # # Pi. -pi_geom="3.14159265358979323846264338327" +PI_GEOM="3.14159265358979323846264338327" # Degrees per radian. 
-degs_per_radian=$( bc -l <<< "360.0/(2.0*${pi_geom})" ) +DEGS_PER_RADIAN=$( bc -l <<< "360.0/(2.0*${PI_GEOM})" ) # Radius of the Earth in meters. -radius_Earth="6371200.0" +RADIUS_EARTH="6371200.0" # #----------------------------------------------------------------------- # diff --git a/ush/create_diag_table_file.py b/ush/create_diag_table_file.py index cd98fcec70..2752d630cc 100644 --- a/ush/create_diag_table_file.py +++ b/ush/create_diag_table_file.py @@ -1,11 +1,14 @@ #!/usr/bin/env python3 import os +import sys +import argparse import unittest from textwrap import dedent from python_utils import import_vars, set_env_var, print_input_args, \ - print_info_msg, print_err_msg_exit, cfg_to_yaml_str + print_info_msg, print_err_msg_exit, cfg_to_yaml_str, \ + load_shell_config from fill_jinja_template import fill_jinja_template @@ -61,6 +64,30 @@ def create_diag_table_file(run_dir): return False return True +def parse_args(argv): + """ Parse command line arguments""" + parser = argparse.ArgumentParser( + description='Creates diagnostic table file.' 
+ ) + + parser.add_argument('-r', '--run-dir', + dest='run_dir', + required=True, + help='Run directory.') + + parser.add_argument('-p', '--path-to-defns', + dest='path_to_defns', + required=True, + help='Path to var_defns file.') + + return parser.parse_args(argv) + +if __name__ == '__main__': + args = parse_args(sys.argv[1:]) + cfg = load_shell_config(args.path_to_defns) + import_vars(dictionary=cfg) + create_diag_table_file(args.run_dir) + class Testing(unittest.TestCase): def test_create_diag_table_file(self): path = os.path.join(os.getenv('USHDIR'), "test_data") diff --git a/ush/create_diag_table_file.sh b/ush/create_diag_table_file.sh deleted file mode 100644 index 41ca941bf8..0000000000 --- a/ush/create_diag_table_file.sh +++ /dev/null @@ -1,117 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that creates a diagnostic table file for -# each cycle to be run. -# -#----------------------------------------------------------------------- -# -function create_diag_table_file() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. 
-# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "run_dir" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local diag_table_fp \ - settings -# -#----------------------------------------------------------------------- -# -# Create a diagnostics table file within the specified run directory. -# -#----------------------------------------------------------------------- -# - print_info_msg "$VERBOSE" " -Creating a diagnostics table file (\"${DIAG_TABLE_FN}\") in the specified -run directory... - - run_dir = \"${run_dir}\"" - - diag_table_fp="${run_dir}/${DIAG_TABLE_FN}" - print_info_msg "$VERBOSE" " - -Using the template diagnostics table file: - - diag_table_tmpl_fp = ${DIAG_TABLE_TMPL_FP} - -to create: - - diag_table_fp = \"${diag_table_fp}\"" - - settings=" -starttime: !datetime ${CDATE} -cres: ${CRES}" - - $USHDIR/fill_jinja_template.py -q -u "${settings}" -t "${DIAG_TABLE_TMPL_FP}" -o "${diag_table_fp}" || \ - print_err_msg_exit " -!!!!!!!!!!!!!!!!! - -fill_jinja_template.py failed! 
- -!!!!!!!!!!!!!!!!! -" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/create_model_configure_file.py b/ush/create_model_configure_file.py index 9ae1241397..1fafe8a7be 100644 --- a/ush/create_model_configure_file.py +++ b/ush/create_model_configure_file.py @@ -1,12 +1,15 @@ #!/usr/bin/env python3 import os +import sys +import argparse import unittest from datetime import datetime from textwrap import dedent -from python_utils import import_vars, set_env_var, print_input_args, \ - print_info_msg, print_err_msg_exit, lowercase, cfg_to_yaml_str +from python_utils import import_vars, set_env_var, print_input_args, str_to_type, \ + print_info_msg, print_err_msg_exit, lowercase, cfg_to_yaml_str, \ + load_shell_config from fill_jinja_template import fill_jinja_template @@ -151,7 +154,7 @@ def create_model_configure_file(cdate,run_dir,sub_hourly_post,dt_subhourly_post_ # to, or smaller than one hour. # if sub_hourly_post: - nsout=dt_subhourly_post_mnts*60 / dt_atmos + nsout=(dt_subhourly_post_mnts*60) // dt_atmos output_fh=0 else: output_fh=1 @@ -196,6 +199,55 @@ def create_model_configure_file(cdate,run_dir,sub_hourly_post,dt_subhourly_post_ return True +def parse_args(argv): + """ Parse command line arguments""" + parser = argparse.ArgumentParser( + description='Creates model configuration file.' 
+ ) + + parser.add_argument('-r', '--run-dir', + dest='run_dir', + required=True, + help='Run directory.') + + parser.add_argument('-c', '--cdate', + dest='cdate', + required=True, + help='Date string in YYYYMMDD format.') + + parser.add_argument('-s', '--sub-hourly-post', + dest='sub_hourly_post', + required=True, + help='Set sub hourly post to either TRUE/FALSE by passing corresponding string.') + + parser.add_argument('-d', '--dt-subhourly-post-mnts', + dest='dt_subhourly_post_mnts', + required=True, + help='Subhourly post minitues.') + + parser.add_argument('-t', '--dt-atmos', + dest='dt_atmos', + required=True, + help='Forecast model\'s main time step.') + + parser.add_argument('-p', '--path-to-defns', + dest='path_to_defns', + required=True, + help='Path to var_defns file.') + + return parser.parse_args(argv) + +if __name__ == '__main__': + args = parse_args(sys.argv[1:]) + cfg = load_shell_config(args.path_to_defns) + import_vars(dictionary=cfg) + create_model_configure_file( \ + run_dir = args.run_dir, \ + cdate = str_to_type(args.cdate), \ + sub_hourly_post = str_to_type(args.sub_hourly_post), \ + dt_subhourly_post_mnts = str_to_type(args.dt_subhourly_post_mnts), \ + dt_atmos = str_to_type(args.dt_atmos) ) + class Testing(unittest.TestCase): def test_create_model_configure_file(self): path = os.path.join(os.getenv('USHDIR'), "test_data") @@ -240,4 +292,3 @@ def setUp(self): set_env_var('WRTCMP_dx',3000.0) set_env_var('WRTCMP_dy',3000.0) - diff --git a/ush/create_model_configure_file.sh b/ush/create_model_configure_file.sh deleted file mode 100644 index 8141877650..0000000000 --- a/ush/create_model_configure_file.sh +++ /dev/null @@ -1,257 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that creates a model configuration file -# in the specified run directory. 
-# -#----------------------------------------------------------------------- -# -function create_model_configure_file() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( -cdate \ -run_dir \ -sub_hourly_post \ -dt_subhourly_post_mnts \ -dt_atmos \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. 
-# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local yyyy \ - mm \ - dd \ - hh \ - dot_quilting_dot \ - dot_print_esmf_dot \ - settings \ - model_config_fp -# -#----------------------------------------------------------------------- -# -# Create a model configuration file in the specified run directory. -# -#----------------------------------------------------------------------- -# - print_info_msg "$VERBOSE" " -Creating a model configuration file (\"${MODEL_CONFIG_FN}\") in the specified -run directory (run_dir): - run_dir = \"${run_dir}\"" -# -# Extract from cdate the starting year, month, day, and hour of the forecast. -# - yyyy=${cdate:0:4} - mm=${cdate:4:2} - dd=${cdate:6:2} - hh=${cdate:8:2} -# -# Set parameters in the model configure file. -# - dot_quilting_dot="."$(echo_lowercase $QUILTING)"." - dot_print_esmf_dot="."$(echo_lowercase $PRINT_ESMF)"." - dot_cpl_dot="."$(echo_lowercase $CPL)"." - dot_write_dopost="."$(echo_lowercase $WRITE_DOPOST)"." -# -#----------------------------------------------------------------------- -# -# Create a multiline variable that consists of a yaml-compliant string -# specifying the values that the jinja variables in the template -# model_configure file should be set to. 
-# -#----------------------------------------------------------------------- -# - settings="\ - 'PE_MEMBER01': ${PE_MEMBER01} - 'print_esmf': ${dot_print_esmf_dot} - 'start_year': $yyyy - 'start_month': $mm - 'start_day': $dd - 'start_hour': $hh - 'nhours_fcst': ${FCST_LEN_HRS} - 'dt_atmos': ${DT_ATMOS} - 'cpl': ${dot_cpl_dot} - 'atmos_nthreads': ${OMP_NUM_THREADS_RUN_FCST} - 'restart_interval': ${RESTART_INTERVAL} - 'write_dopost': ${dot_write_dopost} - 'quilting': ${dot_quilting_dot} - 'output_grid': ${WRTCMP_output_grid}" -# 'output_grid': \'${WRTCMP_output_grid}\'" -# -# If the write-component is to be used, then specify a set of computational -# parameters and a set of grid parameters. The latter depends on the type -# (coordinate system) of the grid that the write-component will be using. -# - if [ "$QUILTING" = "TRUE" ]; then - - settings="${settings} - 'write_groups': ${WRTCMP_write_groups} - 'write_tasks_per_group': ${WRTCMP_write_tasks_per_group} - 'cen_lon': ${WRTCMP_cen_lon} - 'cen_lat': ${WRTCMP_cen_lat} - 'lon1': ${WRTCMP_lon_lwr_left} - 'lat1': ${WRTCMP_lat_lwr_left}" - - if [ "${WRTCMP_output_grid}" = "lambert_conformal" ]; then - - settings="${settings} - 'stdlat1': ${WRTCMP_stdlat1} - 'stdlat2': ${WRTCMP_stdlat2} - 'nx': ${WRTCMP_nx} - 'ny': ${WRTCMP_ny} - 'dx': ${WRTCMP_dx} - 'dy': ${WRTCMP_dy} - 'lon2': \"\" - 'lat2': \"\" - 'dlon': \"\" - 'dlat': \"\"" - - elif [ "${WRTCMP_output_grid}" = "regional_latlon" ] || \ - [ "${WRTCMP_output_grid}" = "rotated_latlon" ]; then - - settings="${settings} - 'lon2': ${WRTCMP_lon_upr_rght} - 'lat2': ${WRTCMP_lat_upr_rght} - 'dlon': ${WRTCMP_dlon} - 'dlat': ${WRTCMP_dlat} - 'stdlat1': \"\" - 'stdlat2': \"\" - 'nx': \"\" - 'ny': \"\" - 'dx': \"\" - 'dy': \"\"" - - fi - - fi -# -# If sub_hourly_post is set to "TRUE", then the forecast model must be -# directed to generate output files on a sub-hourly interval. 
Do this -# by specifying the output interval in the model configuration file -# (MODEL_CONFIG_FN) in units of number of forecat model time steps (nsout). -# nsout is calculated using the user-specified output time interval -# dt_subhourly_post_mnts (in units of minutes) and the forecast model's -# main time step dt_atmos (in units of seconds). Note that nsout is -# guaranteed to be an integer because the experiment generation scripts -# require that dt_subhourly_post_mnts (after conversion to seconds) be -# evenly divisible by dt_atmos. Also, in this case, the variable output_fh -# [which specifies the output interval in hours; -# see the jinja model_config template file] is set to 0, although this -# doesn't matter because any positive of nsout will override output_fh. -# -# If sub_hourly_post is set to "FALSE", then the workflow is hard-coded -# (in the jinja model_config template file) to direct the forecast model -# to output files every hour. This is done by setting (1) output_fh to 1 -# here, and (2) nsout to -1 here which turns off output by time step interval. -# -# Note that the approach used here of separating how hourly and subhourly -# output is handled should be changed/generalized/simplified such that -# the user should only need to specify the output time interval (there -# should be no need to specify a flag like sub_hourly_post); the workflow -# should then be able to direct the model to output files with that time -# interval and to direct the post-processor to process those files -# regardless of whether that output time interval is larger than, equal -# to, or smaller than one hour. 
-# - if [ "${sub_hourly_post}" = "TRUE" ]; then - nsout=$(( dt_subhourly_post_mnts*60 / dt_atmos )) - output_fh=0 - else - output_fh=1 - nsout=-1 - fi - settings="${settings} - 'output_fh': ${output_fh} - 'nsout': ${nsout}" - - print_info_msg $VERBOSE " -The variable \"settings\" specifying values to be used in the \"${MODEL_CONFIG_FN}\" -file has been set as follows: -#----------------------------------------------------------------------- -settings = -$settings" -# -#----------------------------------------------------------------------- -# -# Call a python script to generate the experiment's actual MODEL_CONFIG_FN -# file from the template file. -# -#----------------------------------------------------------------------- -# - model_config_fp="${run_dir}/${MODEL_CONFIG_FN}" - $USHDIR/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${MODEL_CONFIG_TMPL_FP} \ - -o ${model_config_fp} || \ - print_err_msg_exit "\ -Call to python script fill_jinja_template.py to create a \"${MODEL_CONFIG_FN}\" -file from a jinja2 template failed. Parameters passed to this script are: - Full path to template rocoto XML file: - MODEL_CONFIG_TMPL_FP = \"${MODEL_CONFIG_TMPL_FP}\" - Full path to output rocoto XML file: - model_config_fp = \"${model_config_fp}\" - Namelist settings specified on command line: - settings = -$settings" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/generate_FV3LAM_wflow.py b/ush/generate_FV3LAM_wflow.py index ad1e205d45..f21fe308aa 100755 --- a/ush/generate_FV3LAM_wflow.py +++ b/ush/generate_FV3LAM_wflow.py @@ -15,7 +15,7 @@ from setup import setup from set_FV3nml_sfc_climo_filenames import set_FV3nml_sfc_climo_filenames -from get_crontab_contents import get_crontab_contents +from get_crontab_contents import add_crontab_line from fill_jinja_template import fill_jinja_template from set_namelist import set_namelist @@ -449,77 +449,7 @@ def generate_FV3LAM_wflow(): #----------------------------------------------------------------------- # if USE_CRON_TO_RELAUNCH: - # - # Make a backup copy of the user's crontab file and save it in a file. - # - time_stamp = datetime.now().strftime("%F_%T") - crontab_backup_fp=os.path.join(EXPTDIR,f"crontab.bak.{time_stamp}") - print_info_msg(f''' - Copying contents of user cron table to backup file: - crontab_backup_fp = \"{crontab_backup_fp}\"''',verbose=VERBOSE) - - global called_from_cron - try: called_from_cron - except: called_from_cron = False - - crontab_cmd,crontab_contents = get_crontab_contents(called_from_cron=called_from_cron) - # To create the backup crontab file and add a new job to the user's - # existing cron table, use the "printf" command, not "echo", to print - # out variables. This is because "echo" will add a newline at the end - # of its output even if its input argument is a null string, resulting - # in extranous blank lines in the backup crontab file and/or the cron - # table itself. Using "printf" prevents the appearance of these blank - # lines. - run_command(f'''printf "%s" '{crontab_contents}' > "{crontab_backup_fp}"''') - # - # Below, we use "grep" to determine whether the crontab line that the - # variable CRONTAB_LINE contains is already present in the cron table. 
- # For that purpose, we need to escape the asterisks in the string in - # CRONTAB_LINE with backslashes. Do this next. - # - (_,crontab_line_esc_astr,_) = run_command(f'''printf "%s" '{CRONTAB_LINE}' | \ - {SED} -r -e "s%[*]%\\\\*%g"''') - # In the grep command below, the "^" at the beginning of the string - # passed to grep is a start-of-line anchor, and the "$" at the end is - # an end-of-line anchor. Thus, in order for grep to find a match on - # any given line of the cron table's contents, that line must contain - # exactly the string in the variable crontab_line_esc_astr without any - # leading or trailing characters. This is to eliminate situations in - # which a line in the cron table contains the string in crontab_line_esc_astr - # but is precedeeded, for example, by the comment character "#" (in which - # case cron ignores that line) and/or is followed by further commands - # that are not part of the string in crontab_line_esc_astr (in which - # case it does something more than the command portion of the string in - # crontab_line_esc_astr does). - # - if MACHINE == "WCOSS_DELL_P3": - (exit_status,grep_output,_)=run_command(f'''grep '^{crontab_line_esc_astr}$' "/u/{USER}/cron/mycrontab"''') - else: - (exit_status,grep_output,_)=run_command(f'''printf "%s" '{crontab_contents}' | grep "^{crontab_line_esc_astr}$"''') - - if exit_status == 0: - - print_info_msg(f''' - The following line already exists in the cron table and thus will not be - added: - CRONTAB_LINE = \"{CRONTAB_LINE}\"''') - - else: - - print_info_msg(f''' - Adding the following line to the user's cron table in order to automatically - resubmit SRW workflow: - CRONTAB_LINE = \"{CRONTAB_LINE}\"''',verbose=VERBOSE) - - if MACHINE == "WCOSS_DELL_P3": - run_command(f'''printf "%s\n" '{CRONTAB_LINE}' >> "/u/{USER}/cron/mycrontab"''') - else: - # Add a newline to the end of crontab_contents only if it is not empty. 
- # This is needed so that when CRONTAB_LINE is printed out, it appears on - # a separate line. - if crontab_contents: - crontab_contents += "\n" - run_command(f'''( printf "%s" '{crontab_contents}'; printf "%s\n" '{CRONTAB_LINE}' ) | {crontab_cmd}''') + add_crontab_line() # #----------------------------------------------------------------------- # diff --git a/ush/generate_FV3LAM_wflow.sh b/ush/generate_FV3LAM_wflow.sh deleted file mode 100755 index d9f2299139..0000000000 --- a/ush/generate_FV3LAM_wflow.sh +++ /dev/null @@ -1,1213 +0,0 @@ -#!/bin/bash - -# -#----------------------------------------------------------------------- -# -# This file defines and then calls a function that sets up a forecast -# experiment and creates a workflow (according to the parameters speci- -# fied in the configuration file; see instructions). -# -#----------------------------------------------------------------------- -# -function generate_FV3LAM_wflow() { -printf "\ -======================================================================== -======================================================================== - -Starting experiment generation... - -======================================================================== -======================================================================== -" -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# -if [[ $(uname -s) == Darwin ]]; then - local scrfunc_fp=$( greadlink -f "${BASH_SOURCE[0]}" ) -else - local scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -fi -local scrfunc_fn=$( basename "${scrfunc_fp}" ) -local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# -local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Set directories. -# -#----------------------------------------------------------------------- -# -ushdir="${scrfunc_dir}" -# -#----------------------------------------------------------------------- -# -# Source bash utility functions and other necessary files. -# -#----------------------------------------------------------------------- -# -. $ushdir/source_util_funcs.sh -. $ushdir/get_crontab_contents.sh -. 
$ushdir/set_FV3nml_sfc_climo_filenames.sh -# -#----------------------------------------------------------------------- -# -# Run python checks -# -#----------------------------------------------------------------------- -# - -# This line will return two numbers: the python major and minor versions -pyversion=($(/usr/bin/env python3 -c 'import platform; major, minor, patch = platform.python_version_tuple(); print(major); print(minor)')) - -#Now, set an error check variable so that we can print all python errors rather than just the first -pyerrors=0 - -# Check that the call to python3 returned no errors, then check if the -# python3 minor version is 6 or higher -if [[ -z "$pyversion" ]];then - print_info_msg "\ - - Error: python3 not found" - pyerrors=$((pyerrors+1)) -else - if [[ ${#pyversion[@]} -lt 2 ]]; then - print_info_msg "\ - - Error retrieving python3 version" - pyerrors=$((pyerrors+1)) - elif [[ ${pyversion[1]} -lt 6 ]]; then - print_info_msg "\ - - Error: python version must be 3.6 or higher - python version: ${pyversion[*]}" - pyerrors=$((pyerrors+1)) - fi -fi - -#Next, check for the non-standard python packages: jinja2, yaml, and f90nml -pkgs=(jinja2 yaml f90nml) -for pkg in ${pkgs[@]} ; do - if ! /usr/bin/env python3 -c "import ${pkg}" &> /dev/null; then - print_info_msg "\ - - Error: python module ${pkg} not available" - pyerrors=$((pyerrors+1)) - fi -done - -#Finally, check if the number of errors is >0, and if so exit with helpful message -if [ $pyerrors -gt 0 ];then - print_err_msg_exit "\ - Errors found: check your python environment - - Instructions for setting up python environments can be found on the web: - https://github.com/ufs-community/ufs-srweather-app/wiki/Getting-Started - -" -fi -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. 
-# -#----------------------------------------------------------------------- -# -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Source the file that defines and then calls the setup function. The -# setup function in turn first sources the default configuration file -# (which contains default values for the experiment/workflow parameters) -# and then sources the user-specified configuration file (which contains -# user-specified values for a subset of the experiment/workflow parame- -# ters that override their default values). -# -#----------------------------------------------------------------------- -# -. $ushdir/setup.sh -# -#----------------------------------------------------------------------- -# -# Set the full path to the experiment's rocoto workflow xml file. This -# file will be placed at the top level of the experiment directory and -# then used by rocoto to run the workflow. -# -#----------------------------------------------------------------------- -# -WFLOW_XML_FP="$EXPTDIR/${WFLOW_XML_FN}" -# -#----------------------------------------------------------------------- -# -# Create a multiline variable that consists of a yaml-compliant string -# specifying the values that the jinja variables in the template rocoto -# XML should be set to. These values are set either in the user-specified -# workflow configuration file (EXPT_CONFIG_FN) or in the setup.sh script -# sourced above. Then call the python script that generates the XML. 
-# -#----------------------------------------------------------------------- -# -if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then - - template_xml_fp="${TEMPLATE_DIR}/${WFLOW_XML_FN}" - - print_info_msg " -Creating rocoto workflow XML file (WFLOW_XML_FP) from jinja template XML -file (template_xml_fp): - template_xml_fp = \"${template_xml_fp}\" - WFLOW_XML_FP = \"${WFLOW_XML_FP}\"" - - ensmem_indx_name="\"\"" - uscore_ensmem_name="\"\"" - slash_ensmem_subdir="\"\"" - if [ "${DO_ENSEMBLE}" = "TRUE" ]; then - ensmem_indx_name="mem" - uscore_ensmem_name="_mem#${ensmem_indx_name}#" - slash_ensmem_subdir="/mem#${ensmem_indx_name}#" - fi - - settings="\ -# -# Parameters needed by the job scheduler. -# - 'account': $ACCOUNT - 'sched': $SCHED - 'partition_default': ${PARTITION_DEFAULT} - 'queue_default': ${QUEUE_DEFAULT} - 'partition_hpss': ${PARTITION_HPSS} - 'queue_hpss': ${QUEUE_HPSS} - 'partition_fcst': ${PARTITION_FCST} - 'queue_fcst': ${QUEUE_FCST} - 'machine': ${MACHINE} - 'slurm_native_cmd': ${SLURM_NATIVE_CMD} -# -# Workflow task names. 
-# - 'make_grid_tn': ${MAKE_GRID_TN} - 'make_orog_tn': ${MAKE_OROG_TN} - 'make_sfc_climo_tn': ${MAKE_SFC_CLIMO_TN} - 'get_extrn_ics_tn': ${GET_EXTRN_ICS_TN} - 'get_extrn_lbcs_tn': ${GET_EXTRN_LBCS_TN} - 'make_ics_tn': ${MAKE_ICS_TN} - 'make_lbcs_tn': ${MAKE_LBCS_TN} - 'run_fcst_tn': ${RUN_FCST_TN} - 'run_post_tn': ${RUN_POST_TN} - 'get_obs_ccpa_tn': ${GET_OBS_CCPA_TN} - 'get_obs_ndas_tn': ${GET_OBS_NDAS_TN} - 'get_obs_mrms_tn': ${GET_OBS_MRMS_TN} - 'vx_tn': ${VX_TN} - 'vx_gridstat_tn': ${VX_GRIDSTAT_TN} - 'vx_gridstat_refc_tn': ${VX_GRIDSTAT_REFC_TN} - 'vx_gridstat_retop_tn': ${VX_GRIDSTAT_RETOP_TN} - 'vx_gridstat_03h_tn': ${VX_GRIDSTAT_03h_TN} - 'vx_gridstat_06h_tn': ${VX_GRIDSTAT_06h_TN} - 'vx_gridstat_24h_tn': ${VX_GRIDSTAT_24h_TN} - 'vx_pointstat_tn': ${VX_POINTSTAT_TN} - 'vx_ensgrid_tn': ${VX_ENSGRID_TN} - 'vx_ensgrid_refc_tn': ${VX_ENSGRID_REFC_TN} - 'vx_ensgrid_retop_tn': ${VX_ENSGRID_RETOP_TN} - 'vx_ensgrid_03h_tn': ${VX_ENSGRID_03h_TN} - 'vx_ensgrid_06h_tn': ${VX_ENSGRID_06h_TN} - 'vx_ensgrid_24h_tn': ${VX_ENSGRID_24h_TN} - 'vx_ensgrid_mean_tn': ${VX_ENSGRID_MEAN_TN} - 'vx_ensgrid_prob_tn': ${VX_ENSGRID_PROB_TN} - 'vx_ensgrid_mean_03h_tn': ${VX_ENSGRID_MEAN_03h_TN} - 'vx_ensgrid_prob_03h_tn': ${VX_ENSGRID_PROB_03h_TN} - 'vx_ensgrid_mean_06h_tn': ${VX_ENSGRID_MEAN_06h_TN} - 'vx_ensgrid_prob_06h_tn': ${VX_ENSGRID_PROB_06h_TN} - 'vx_ensgrid_mean_24h_tn': ${VX_ENSGRID_MEAN_24h_TN} - 'vx_ensgrid_prob_24h_tn': ${VX_ENSGRID_PROB_24h_TN} - 'vx_ensgrid_prob_refc_tn': ${VX_ENSGRID_PROB_REFC_TN} - 'vx_ensgrid_prob_retop_tn': ${VX_ENSGRID_PROB_RETOP_TN} - 'vx_enspoint_tn': ${VX_ENSPOINT_TN} - 'vx_enspoint_mean_tn': ${VX_ENSPOINT_MEAN_TN} - 'vx_enspoint_prob_tn': ${VX_ENSPOINT_PROB_TN} -# -# Entity used to load the module file for each GET_OBS_* task. -# - 'get_obs': ${GET_OBS} -# -# Number of nodes to use for each task. 
-# - 'nnodes_make_grid': ${NNODES_MAKE_GRID} - 'nnodes_make_orog': ${NNODES_MAKE_OROG} - 'nnodes_make_sfc_climo': ${NNODES_MAKE_SFC_CLIMO} - 'nnodes_get_extrn_ics': ${NNODES_GET_EXTRN_ICS} - 'nnodes_get_extrn_lbcs': ${NNODES_GET_EXTRN_LBCS} - 'nnodes_make_ics': ${NNODES_MAKE_ICS} - 'nnodes_make_lbcs': ${NNODES_MAKE_LBCS} - 'nnodes_run_fcst': ${NNODES_RUN_FCST} - 'nnodes_run_post': ${NNODES_RUN_POST} - 'nnodes_get_obs_ccpa': ${NNODES_GET_OBS_CCPA} - 'nnodes_get_obs_mrms': ${NNODES_GET_OBS_MRMS} - 'nnodes_get_obs_ndas': ${NNODES_GET_OBS_NDAS} - 'nnodes_vx_gridstat': ${NNODES_VX_GRIDSTAT} - 'nnodes_vx_pointstat': ${NNODES_VX_POINTSTAT} - 'nnodes_vx_ensgrid': ${NNODES_VX_ENSGRID} - 'nnodes_vx_ensgrid_mean': ${NNODES_VX_ENSGRID_MEAN} - 'nnodes_vx_ensgrid_prob': ${NNODES_VX_ENSGRID_PROB} - 'nnodes_vx_enspoint': ${NNODES_VX_ENSPOINT} - 'nnodes_vx_enspoint_mean': ${NNODES_VX_ENSPOINT_MEAN} - 'nnodes_vx_enspoint_prob': ${NNODES_VX_ENSPOINT_PROB} -# -# Number of cores used for a task -# - 'ncores_run_fcst': ${PE_MEMBER01} - 'native_run_fcst': --cpus-per-task ${OMP_NUM_THREADS_RUN_FCST} --exclusive -# -# Number of logical processes per node for each task. If running without -# threading, this is equal to the number of MPI processes per node. 
-# - 'ppn_make_grid': ${PPN_MAKE_GRID} - 'ppn_make_orog': ${PPN_MAKE_OROG} - 'ppn_make_sfc_climo': ${PPN_MAKE_SFC_CLIMO} - 'ppn_get_extrn_ics': ${PPN_GET_EXTRN_ICS} - 'ppn_get_extrn_lbcs': ${PPN_GET_EXTRN_LBCS} - 'ppn_make_ics': ${PPN_MAKE_ICS} - 'ppn_make_lbcs': ${PPN_MAKE_LBCS} - 'ppn_run_fcst': ${PPN_RUN_FCST} - 'ppn_run_post': ${PPN_RUN_POST} - 'ppn_get_obs_ccpa': ${PPN_GET_OBS_CCPA} - 'ppn_get_obs_mrms': ${PPN_GET_OBS_MRMS} - 'ppn_get_obs_ndas': ${PPN_GET_OBS_NDAS} - 'ppn_vx_gridstat': ${PPN_VX_GRIDSTAT} - 'ppn_vx_pointstat': ${PPN_VX_POINTSTAT} - 'ppn_vx_ensgrid': ${PPN_VX_ENSGRID} - 'ppn_vx_ensgrid_mean': ${PPN_VX_ENSGRID_MEAN} - 'ppn_vx_ensgrid_prob': ${PPN_VX_ENSGRID_PROB} - 'ppn_vx_enspoint': ${PPN_VX_ENSPOINT} - 'ppn_vx_enspoint_mean': ${PPN_VX_ENSPOINT_MEAN} - 'ppn_vx_enspoint_prob': ${PPN_VX_ENSPOINT_PROB} -# -# Maximum wallclock time for each task. -# - 'wtime_make_grid': ${WTIME_MAKE_GRID} - 'wtime_make_orog': ${WTIME_MAKE_OROG} - 'wtime_make_sfc_climo': ${WTIME_MAKE_SFC_CLIMO} - 'wtime_get_extrn_ics': ${WTIME_GET_EXTRN_ICS} - 'wtime_get_extrn_lbcs': ${WTIME_GET_EXTRN_LBCS} - 'wtime_make_ics': ${WTIME_MAKE_ICS} - 'wtime_make_lbcs': ${WTIME_MAKE_LBCS} - 'wtime_run_fcst': ${WTIME_RUN_FCST} - 'wtime_run_post': ${WTIME_RUN_POST} - 'wtime_get_obs_ccpa': ${WTIME_GET_OBS_CCPA} - 'wtime_get_obs_mrms': ${WTIME_GET_OBS_MRMS} - 'wtime_get_obs_ndas': ${WTIME_GET_OBS_NDAS} - 'wtime_vx_gridstat': ${WTIME_VX_GRIDSTAT} - 'wtime_vx_pointstat': ${WTIME_VX_POINTSTAT} - 'wtime_vx_ensgrid': ${WTIME_VX_ENSGRID} - 'wtime_vx_ensgrid_mean': ${WTIME_VX_ENSGRID_MEAN} - 'wtime_vx_ensgrid_prob': ${WTIME_VX_ENSGRID_PROB} - 'wtime_vx_enspoint': ${WTIME_VX_ENSPOINT} - 'wtime_vx_enspoint_mean': ${WTIME_VX_ENSPOINT_MEAN} - 'wtime_vx_enspoint_prob': ${WTIME_VX_ENSPOINT_PROB} -# -# Maximum number of tries for each task. 
-# - 'maxtries_make_grid': ${MAXTRIES_MAKE_GRID} - 'maxtries_make_orog': ${MAXTRIES_MAKE_OROG} - 'maxtries_make_sfc_climo': ${MAXTRIES_MAKE_SFC_CLIMO} - 'maxtries_get_extrn_ics': ${MAXTRIES_GET_EXTRN_ICS} - 'maxtries_get_extrn_lbcs': ${MAXTRIES_GET_EXTRN_LBCS} - 'maxtries_make_ics': ${MAXTRIES_MAKE_ICS} - 'maxtries_make_lbcs': ${MAXTRIES_MAKE_LBCS} - 'maxtries_run_fcst': ${MAXTRIES_RUN_FCST} - 'maxtries_run_post': ${MAXTRIES_RUN_POST} - 'maxtries_get_obs_ccpa': ${MAXTRIES_GET_OBS_CCPA} - 'maxtries_get_obs_mrms': ${MAXTRIES_GET_OBS_MRMS} - 'maxtries_get_obs_ndas': ${MAXTRIES_GET_OBS_NDAS} - 'maxtries_vx_gridstat': ${MAXTRIES_VX_GRIDSTAT} - 'maxtries_vx_gridstat_refc': ${MAXTRIES_VX_GRIDSTAT_REFC} - 'maxtries_vx_gridstat_retop': ${MAXTRIES_VX_GRIDSTAT_RETOP} - 'maxtries_vx_gridstat_03h': ${MAXTRIES_VX_GRIDSTAT_03h} - 'maxtries_vx_gridstat_06h': ${MAXTRIES_VX_GRIDSTAT_06h} - 'maxtries_vx_gridstat_24h': ${MAXTRIES_VX_GRIDSTAT_24h} - 'maxtries_vx_pointstat': ${MAXTRIES_VX_POINTSTAT} - 'maxtries_vx_ensgrid': ${MAXTRIES_VX_ENSGRID} - 'maxtries_vx_ensgrid_refc': ${MAXTRIES_VX_ENSGRID_REFC} - 'maxtries_vx_ensgrid_retop': ${MAXTRIES_VX_ENSGRID_RETOP} - 'maxtries_vx_ensgrid_03h': ${MAXTRIES_VX_ENSGRID_03h} - 'maxtries_vx_ensgrid_06h': ${MAXTRIES_VX_ENSGRID_06h} - 'maxtries_vx_ensgrid_24h': ${MAXTRIES_VX_ENSGRID_24h} - 'maxtries_vx_ensgrid_mean': ${MAXTRIES_VX_ENSGRID_MEAN} - 'maxtries_vx_ensgrid_prob': ${MAXTRIES_VX_ENSGRID_PROB} - 'maxtries_vx_ensgrid_mean_03h': ${MAXTRIES_VX_ENSGRID_MEAN_03h} - 'maxtries_vx_ensgrid_prob_03h': ${MAXTRIES_VX_ENSGRID_PROB_03h} - 'maxtries_vx_ensgrid_mean_06h': ${MAXTRIES_VX_ENSGRID_MEAN_06h} - 'maxtries_vx_ensgrid_prob_06h': ${MAXTRIES_VX_ENSGRID_PROB_06h} - 'maxtries_vx_ensgrid_mean_24h': ${MAXTRIES_VX_ENSGRID_MEAN_24h} - 'maxtries_vx_ensgrid_prob_24h': ${MAXTRIES_VX_ENSGRID_PROB_24h} - 'maxtries_vx_ensgrid_prob_refc': ${MAXTRIES_VX_ENSGRID_PROB_REFC} - 'maxtries_vx_ensgrid_prob_retop': ${MAXTRIES_VX_ENSGRID_PROB_RETOP} - 
'maxtries_vx_enspoint': ${MAXTRIES_VX_ENSPOINT} - 'maxtries_vx_enspoint_mean': ${MAXTRIES_VX_ENSPOINT_MEAN} - 'maxtries_vx_enspoint_prob': ${MAXTRIES_VX_ENSPOINT_PROB} -# -# Flags that specify whether to run the preprocessing or -# verification-related tasks. -# - 'run_task_make_grid': ${RUN_TASK_MAKE_GRID} - 'run_task_make_orog': ${RUN_TASK_MAKE_OROG} - 'run_task_make_sfc_climo': ${RUN_TASK_MAKE_SFC_CLIMO} - 'run_task_get_extrn_ics': ${RUN_TASK_GET_EXTRN_ICS} - 'run_task_get_extrn_lbcs': ${RUN_TASK_GET_EXTRN_LBCS} - 'run_task_make_ics': ${RUN_TASK_MAKE_ICS} - 'run_task_make_lbcs': ${RUN_TASK_MAKE_LBCS} - 'run_task_run_fcst': ${RUN_TASK_RUN_FCST} - 'run_task_run_post': ${RUN_TASK_RUN_POST} - 'run_task_get_obs_ccpa': ${RUN_TASK_GET_OBS_CCPA} - 'run_task_get_obs_mrms': ${RUN_TASK_GET_OBS_MRMS} - 'run_task_get_obs_ndas': ${RUN_TASK_GET_OBS_NDAS} - 'run_task_vx_gridstat': ${RUN_TASK_VX_GRIDSTAT} - 'run_task_vx_pointstat': ${RUN_TASK_VX_POINTSTAT} - 'run_task_vx_ensgrid': ${RUN_TASK_VX_ENSGRID} - 'run_task_vx_enspoint': ${RUN_TASK_VX_ENSPOINT} -# -# Number of physical cores per node for the current machine. -# - 'ncores_per_node': ${NCORES_PER_NODE} -# -# Directories and files. -# - 'jobsdir': $JOBSDIR - 'logdir': $LOGDIR - 'scriptsdir': $SCRIPTSDIR - 'cycle_basedir': ${CYCLE_BASEDIR} - 'global_var_defns_fp': ${GLOBAL_VAR_DEFNS_FP} - 'load_modules_run_task_fp': ${LOAD_MODULES_RUN_TASK_FP} -# -# External model information for generating ICs and LBCs. -# - 'extrn_mdl_name_ics': ${EXTRN_MDL_NAME_ICS} - 'extrn_mdl_name_lbcs': ${EXTRN_MDL_NAME_LBCS} -# -# Parameters that determine the set of cycles to run. -# - 'date_first_cycl': ${DATE_FIRST_CYCL} - 'date_last_cycl': ${DATE_LAST_CYCL} - 'cdate_first_cycl': !datetime ${DATE_FIRST_CYCL}${CYCL_HRS[0]} - 'cycl_hrs': [ $( printf "\'%s\', " "${CYCL_HRS[@]}" ) ] - 'cycl_freq': !!str ${INCR_CYCL_FREQ}:00:00 -# -# Forecast length (same for all cycles). 
-# - 'fcst_len_hrs': ${FCST_LEN_HRS} -# -# Inline post -# - 'write_dopost': ${WRITE_DOPOST} -# -# METPlus-specific information -# - 'model': ${MODEL} - 'met_install_dir': ${MET_INSTALL_DIR} - 'met_bin_exec': ${MET_BIN_EXEC} - 'metplus_path': ${METPLUS_PATH} - 'vx_config_dir': ${VX_CONFIG_DIR} - 'metplus_conf': ${METPLUS_CONF} - 'met_config': ${MET_CONFIG} - 'ccpa_obs_dir': ${CCPA_OBS_DIR} - 'mrms_obs_dir': ${MRMS_OBS_DIR} - 'ndas_obs_dir': ${NDAS_OBS_DIR} -# -# Ensemble-related parameters. -# - 'do_ensemble': ${DO_ENSEMBLE} - 'num_ens_members': ${NUM_ENS_MEMBERS} - 'ndigits_ensmem_names': !!str ${NDIGITS_ENSMEM_NAMES} - 'ensmem_indx_name': ${ensmem_indx_name} - 'uscore_ensmem_name': ${uscore_ensmem_name} - 'slash_ensmem_subdir': ${slash_ensmem_subdir} -# -# Parameters associated with subhourly post-processed output -# - 'sub_hourly_post': ${SUB_HOURLY_POST} - 'delta_min': ${DT_SUBHOURLY_POST_MNTS} - 'first_fv3_file_tstr': "000:"`$DATE_UTIL -d "${DATE_FIRST_CYCL} +${DT_ATMOS} seconds" +%M:%S` -" # End of "settings" variable. - - print_info_msg "$VERBOSE" " -The variable \"settings\" specifying values of the rococo XML variables -has been set as follows: -#----------------------------------------------------------------------- -settings = -$settings" - -# -# Call the python script to generate the experiment's actual XML file -# from the jinja template file. -# - $USHDIR/fill_jinja_template.py -q \ - -u "${settings}" \ - -t ${template_xml_fp} \ - -o ${WFLOW_XML_FP} || \ - print_err_msg_exit "\ -Call to python script fill_jinja_template.py to create a rocoto workflow -XML file from a template file failed. 
Parameters passed to this script -are: - Full path to template rocoto XML file: - template_xml_fp = \"${template_xml_fp}\" - Full path to output rocoto XML file: - WFLOW_XML_FP = \"${WFLOW_XML_FP}\" - Namelist settings specified on command line: - settings = -$settings" - -fi -# -#----------------------------------------------------------------------- -# -# Create a symlink in the experiment directory that points to the workflow -# (re)launch script. -# -#----------------------------------------------------------------------- -# -print_info_msg "$VERBOSE" " -Creating symlink in the experiment directory (EXPTDIR) that points to the -workflow launch script (WFLOW_LAUNCH_SCRIPT_FP): - EXPTDIR = \"${EXPTDIR}\" - WFLOW_LAUNCH_SCRIPT_FP = \"${WFLOW_LAUNCH_SCRIPT_FP}\"" -create_symlink_to_file target="${WFLOW_LAUNCH_SCRIPT_FP}" \ - symlink="${EXPTDIR}/${WFLOW_LAUNCH_SCRIPT_FN}" \ - relative="FALSE" -# -#----------------------------------------------------------------------- -# -# If USE_CRON_TO_RELAUNCH is set to TRUE, add a line to the user's cron -# table to call the (re)launch script every CRON_RELAUNCH_INTVL_MNTS mi- -# nutes. -# -#----------------------------------------------------------------------- -# -if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then -# -# Make a backup copy of the user's crontab file and save it in a file. -# - time_stamp=$( $DATE_UTIL "+%F_%T" ) - crontab_backup_fp="$EXPTDIR/crontab.bak.${time_stamp}" - print_info_msg "$VERBOSE" " -Copying contents of user cron table to backup file: - crontab_backup_fp = \"${crontab_backup_fp}\"" - - called_from_cron=${called_from_cron:-"FALSE"} - get_crontab_contents called_from_cron=${called_from_cron} \ - outvarname_crontab_cmd="crontab_cmd" \ - outvarname_crontab_contents="crontab_contents" - # To create the backup crontab file and add a new job to the user's - # existing cron table, use the "printf" command, not "echo", to print - # out variables. 
This is because "echo" will add a newline at the end - # of its output even if its input argument is a null string, resulting - # in extranous blank lines in the backup crontab file and/or the cron - # table itself. Using "printf" prevents the appearance of these blank - # lines. - printf "%s" "${crontab_contents}" > "${crontab_backup_fp}" - # - # Below, we use "grep" to determine whether the crontab line that the - # variable CRONTAB_LINE contains is already present in the cron table. - # For that purpose, we need to escape the asterisks in the string in - # CRONTAB_LINE with backslashes. Do this next. - # - crontab_line_esc_astr=$( printf "%s" "${CRONTAB_LINE}" | \ - $SED -r -e "s%[*]%\\\\*%g" ) - # - # In the grep command below, the "^" at the beginning of the string - # passed to grep is a start-of-line anchor, and the "$" at the end is - # an end-of-line anchor. Thus, in order for grep to find a match on - # any given line of the cron table's contents, that line must contain - # exactly the string in the variable crontab_line_esc_astr without any - # leading or trailing characters. This is to eliminate situations in - # which a line in the cron table contains the string in crontab_line_esc_astr - # but is precedeeded, for example, by the comment character "#" (in which - # case cron ignores that line) and/or is followed by further commands - # that are not part of the string in crontab_line_esc_astr (in which - # case it does something more than the command portion of the string in - # crontab_line_esc_astr does). - # - if [ "$MACHINE" = "WCOSS_DELL_P3" ]; then - grep_output=$( grep "^${crontab_line_esc_astr}$" "/u/$USER/cron/mycrontab" ) - else - grep_output=$( printf "%s" "${crontab_contents}" | grep "^${crontab_line_esc_astr}$" ) - fi - exit_status=$? 
- - if [ "${exit_status}" -eq 0 ]; then - - print_info_msg " -The following line already exists in the cron table and thus will not be -added: - CRONTAB_LINE = \"${CRONTAB_LINE}\"" - - else - - print_info_msg "$VERBOSE" " -Adding the following line to the user's cron table in order to automatically -resubmit SRW workflow: - CRONTAB_LINE = \"${CRONTAB_LINE}\"" - - if [ "$MACHINE" = "WCOSS_DELL_P3" ]; then - printf "%s\n" "${CRONTAB_LINE}" >> "/u/$USER/cron/mycrontab" - else - # Add a newline to the end of crontab_contents only if it is not empty. - # This is needed so that when CRONTAB_LINE is printed out, it appears on - # a separate line. - crontab_contents=${crontab_contents:+"${crontab_contents}"$'\n'} - # When printing CRONTAB_LINE, add a newline at the end. This is necessary - # on certain machines (e.g. Cheyenne) while on others, it doesn't make - # a difference. - ( printf "%s" "${crontab_contents}"; printf "%s\n" "${CRONTAB_LINE}" ) | ${crontab_cmd} - fi - - fi - -fi -# -#----------------------------------------------------------------------- -# -# Create the FIXam directory under the experiment directory. In NCO mode, -# this will be a symlink to the directory specified in FIXgsm, while in -# community mode, it will be an actual directory with files copied into -# it from FIXgsm. -# -#----------------------------------------------------------------------- -# -# First, consider NCO mode. -# -if [ "${RUN_ENVIR}" = "nco" ]; then - - ln_vrfy -fsn "$FIXgsm" "$FIXam" -# -# Resolve the target directory that the FIXam symlink points to and check -# that it exists. -# - path_resolved=$( $READLINK -m "$FIXam" ) - if [ ! -d "${path_resolved}" ]; then - print_err_msg_exit "\ -In order to be able to generate a forecast experiment in NCO mode (i.e. 
-when RUN_ENVIR set to \"nco\"), the path specified by FIXam after resolving -all symlinks (path_resolved) must be an existing directory (but in this -case isn't): - RUN_ENVIR = \"${RUN_ENVIR}\" - FIXam = \"$FIXam\" - path_resolved = \"${path_resolved}\" -Please ensure that path_resolved is an existing directory and then rerun -the experiment generation script." - fi -# -# Now consider community mode. -# -else - - print_info_msg "$VERBOSE" " -Copying fixed files from system directory (FIXgsm) to a subdirectory -(FIXam) in the experiment directory: - FIXgsm = \"$FIXgsm\" - FIXam = \"$FIXam\"" - - check_for_preexist_dir_file "$FIXam" "delete" - mkdir_vrfy -p "$FIXam" - mkdir_vrfy -p "$FIXam/fix_co2_proj" - - num_files=${#FIXgsm_FILES_TO_COPY_TO_FIXam[@]} - for (( i=0; i<${num_files}; i++ )); do - fn="${FIXgsm_FILES_TO_COPY_TO_FIXam[$i]}" - cp_vrfy "$FIXgsm/$fn" "$FIXam/$fn" - done - -fi -# -#----------------------------------------------------------------------- -# -# Copy MERRA2 aerosol climatology data. -# -#----------------------------------------------------------------------- -# -if [ "${USE_MERRA_CLIMO}" = "TRUE" ]; then - print_info_msg "$VERBOSE" " -Copying MERRA2 aerosol climatology data files from system directory -(FIXaer/FIXlut) to a subdirectory (FIXclim) in the experiment directory: - FIXaer = \"${FIXaer}\" - FIXlut = \"${FIXlut}\" - FIXclim = \"${FIXclim}\"" - - check_for_preexist_dir_file "${FIXclim}" "delete" - mkdir_vrfy -p "${FIXclim}" - - cp_vrfy "${FIXaer}/merra2.aerclim"*".nc" "${FIXclim}/" - cp_vrfy "${FIXlut}/optics"*".dat" "${FIXclim}/" -fi -# -#----------------------------------------------------------------------- -# -# Copy templates of various input files to the experiment directory. -# -#----------------------------------------------------------------------- -# -print_info_msg "$VERBOSE" " -Copying templates of various input files to the experiment directory..." 
- -print_info_msg "$VERBOSE" " - Copying the template data table file to the experiment directory..." -cp_vrfy "${DATA_TABLE_TMPL_FP}" "${DATA_TABLE_FP}" - -print_info_msg "$VERBOSE" " - Copying the template field table file to the experiment directory..." -cp_vrfy "${FIELD_TABLE_TMPL_FP}" "${FIELD_TABLE_FP}" - -print_info_msg "$VERBOSE" " - Copying the template NEMS configuration file to the experiment directory..." -cp_vrfy "${NEMS_CONFIG_TMPL_FP}" "${NEMS_CONFIG_FP}" -# -# Copy the CCPP physics suite definition file from its location in the -# clone of the FV3 code repository to the experiment directory (EXPT- -# DIR). -# -print_info_msg "$VERBOSE" " -Copying the CCPP physics suite definition XML file from its location in -the forecast model directory sturcture to the experiment directory..." -cp_vrfy "${CCPP_PHYS_SUITE_IN_CCPP_FP}" "${CCPP_PHYS_SUITE_FP}" -# -# Copy the field dictionary file from its location in the -# clone of the FV3 code repository to the experiment directory (EXPT- -# DIR). -# -print_info_msg "$VERBOSE" " -Copying the field dictionary file from its location in the forecast -model directory sturcture to the experiment directory..." -cp_vrfy "${FIELD_DICT_IN_UWM_FP}" "${FIELD_DICT_FP}" -# -#----------------------------------------------------------------------- -# -# Set parameters in the FV3-LAM namelist file. -# -#----------------------------------------------------------------------- -# -print_info_msg " -Setting parameters in weather model's namelist file (FV3_NML_FP): - FV3_NML_FP = \"${FV3_NML_FP}\"" -# -# Set npx and npy, which are just NX plus 1 and NY plus 1, respectively. -# These need to be set in the FV3-LAM Fortran namelist file. They represent -# the number of cell vertices in the x and y directions on the regional -# grid. 
-# -npx=$((NX+1)) -npy=$((NY+1)) -# -# For the physics suites that use RUC LSM, set the parameter kice to 9, -# Otherwise, leave it unspecified (which means it gets set to the default -# value in the forecast model). -# -# NOTE: -# May want to remove kice from FV3.input.yml (and maybe input.nml.FV3). -# -kice="" -if [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then - kice="9" -fi -# -# Set lsoil, which is the number of input soil levels provided in the -# chgres_cube output NetCDF file. This is the same as the parameter -# nsoill_out in the namelist file for chgres_cube. [On the other hand, -# the parameter lsoil_lsm (not set here but set in input.nml.FV3 and/or -# FV3.input.yml) is the number of soil levels that the LSM scheme in the -# forecast model will run with.] Here, we use the same approach to set -# lsoil as the one used to set nsoill_out in exregional_make_ics.sh. -# See that script for details. -# -# NOTE: -# May want to remove lsoil from FV3.input.yml (and maybe input.nml.FV3). -# Also, may want to set lsm here as well depending on SDF_USES_RUC_LSM. -# -lsoil="4" -if [ "${EXTRN_MDL_NAME_ICS}" = "HRRR" -o \ - "${EXTRN_MDL_NAME_ICS}" = "RAP" ] && \ - [ "${SDF_USES_RUC_LSM}" = "TRUE" ]; then - lsoil="9" -fi -# -# Create a multiline variable that consists of a yaml-compliant string -# specifying the values that the namelist variables that are physics- -# suite-independent need to be set to. Below, this variable will be -# passed to a python script that will in turn set the values of these -# variables in the namelist file. -# -# IMPORTANT: -# If we want a namelist variable to be removed from the namelist file, -# in the "settings" variable below, we need to set its value to the -# string "null". This is equivalent to setting its value to -# !!python/none -# in the base namelist file specified by FV3_NML_BASE_SUITE_FP or the -# suite-specific yaml settings file specified by FV3_NML_YAML_CONFIG_FP. 
-# -# It turns out that setting the variable to an empty string also works -# to remove it from the namelist! Which is better to use?? -# -settings="\ -'atmos_model_nml': { - 'blocksize': $BLOCKSIZE, - 'ccpp_suite': ${CCPP_PHYS_SUITE}, - } -'fv_core_nml': { - 'target_lon': ${LON_CTR}, - 'target_lat': ${LAT_CTR}, - 'nrows_blend': ${HALO_BLEND}, -# -# Question: -# For a ESGgrid type grid, what should stretch_fac be set to? This depends -# on how the FV3 code uses the stretch_fac parameter in the namelist file. -# Recall that for a ESGgrid, it gets set in the function set_gridparams_ESGgrid(.sh) -# to something like 0.9999, but is it ok to set it to that here in the -# FV3 namelist file? -# - 'stretch_fac': ${STRETCH_FAC}, - 'npx': $npx, - 'npy': $npy, - 'layout': [${LAYOUT_X}, ${LAYOUT_Y}], - 'bc_update_interval': ${LBC_SPEC_INTVL_HRS}, - } -'gfs_physics_nml': { - 'kice': ${kice:-null}, - 'lsoil': ${lsoil:-null}, - 'do_shum': ${DO_SHUM}, - 'do_sppt': ${DO_SPPT}, - 'do_skeb': ${DO_SKEB}, - 'do_spp': ${DO_SPP}, - 'n_var_spp': ${N_VAR_SPP}, - 'n_var_lndp': ${N_VAR_LNDP}, - 'lndp_type': ${LNDP_TYPE}, - 'fhcyc': ${FHCYC_LSM_SPP_OR_NOT}, - }" -# -# Add to "settings" the values of those namelist variables that specify -# the paths to fixed files in the FIXam directory. As above, these namelist -# variables are physcs-suite-independent. -# -# Note that the array FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING contains -# the mapping between the namelist variables and the names of the files -# in the FIXam directory. Here, we loop through this array and process -# each element to construct each line of "settings". 
-# -settings="$settings -'namsfc': {" - -dummy_run_dir="$EXPTDIR/any_cyc" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then - dummy_run_dir="${dummy_run_dir}/any_ensmem" -fi - -regex_search="^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" -num_nml_vars=${#FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING[@]} -for (( i=0; i<${num_nml_vars}; i++ )); do - - mapping="${FV3_NML_VARNAME_TO_FIXam_FILES_MAPPING[$i]}" - nml_var_name=$( printf "%s\n" "$mapping" | \ - $SED -n -r -e "s/${regex_search}/\1/p" ) - FIXam_fn=$( printf "%s\n" "$mapping" | - $SED -n -r -e "s/${regex_search}/\2/p" ) - - fp="\"\"" - if [ ! -z "${FIXam_fn}" ]; then - fp="$FIXam/${FIXam_fn}" -# -# If not in NCO mode, for portability and brevity, change fp so that it -# is a relative path (relative to any cycle directory immediately under -# the experiment directory). -# - if [ "${RUN_ENVIR}" != "nco" ]; then - fp=$( realpath --canonicalize-missing --relative-to="${dummy_run_dir}" "$fp" ) - fi - fi -# -# Add a line to the variable "settings" that specifies (in a yaml-compliant -# format) the name of the current namelist variable and the value it should -# be set to. -# - settings="$settings - '${nml_var_name}': $fp," - -done -# -# Add the closing curly bracket to "settings". -# -settings="$settings - }" -# -# Use netCDF4 when running the North American 3-km domain due to file size. -# -if [ "${PREDEF_GRID_NAME}" = "RRFS_NA_3km" ]; then -settings="$settings -'fms2_io_nml': { - 'netcdf_default_format': netcdf4, - }" -fi -# -# Add the relevant tendency-based stochastic physics namelist variables to -# "settings" when running with SPPT, SHUM, or SKEB turned on. If running -# with SPP or LSM SPP, set the "new_lscale" variable. Otherwise only -# include an empty "nam_stochy" stanza. 
-# -settings="$settings -'nam_stochy': {" -if [ "${DO_SPPT}" = "TRUE" ]; then - settings="$settings - 'iseed_sppt': ${ISEED_SPPT}, - 'new_lscale': ${NEW_LSCALE}, - 'sppt': ${SPPT_MAG}, - 'sppt_logit': ${SPPT_LOGIT}, - 'sppt_lscale': ${SPPT_LSCALE}, - 'sppt_sfclimit': ${SPPT_SFCLIMIT}, - 'sppt_tau': ${SPPT_TSCALE}, - 'spptint': ${SPPT_INT}, - 'use_zmtnblck': ${USE_ZMTNBLCK}," -fi - -if [ "${DO_SHUM}" = "TRUE" ]; then - settings="$settings - 'iseed_shum': ${ISEED_SHUM}, - 'new_lscale': ${NEW_LSCALE}, - 'shum': ${SHUM_MAG}, - 'shum_lscale': ${SHUM_LSCALE}, - 'shum_tau': ${SHUM_TSCALE}, - 'shumint': ${SHUM_INT}," -fi - -if [ "${DO_SKEB}" = "TRUE" ]; then - settings="$settings - 'iseed_skeb': ${ISEED_SKEB}, - 'new_lscale': ${NEW_LSCALE}, - 'skeb': ${SKEB_MAG}, - 'skeb_lscale': ${SKEB_LSCALE}, - 'skebnorm': ${SKEBNORM}, - 'skeb_tau': ${SKEB_TSCALE}, - 'skebint': ${SKEB_INT}, - 'skeb_vdof': ${SKEB_VDOF}," -fi - -if [ "${DO_SPP}" = "TRUE" ] || [ "${DO_LSM_SPP}" = "TRUE" ]; then - settings="$settings - 'new_lscale': ${NEW_LSCALE}," -fi -settings="$settings - }" -# -# Add the relevant SPP namelist variables to "settings" when running with -# SPP turned on. Otherwise only include an empty "nam_sppperts" stanza. -# -settings="$settings -'nam_sppperts': {" -if [ "${DO_SPP}" = "TRUE" ]; then - settings="$settings - 'iseed_spp': [ $( printf "%s, " "${ISEED_SPP[@]}" ) ], - 'spp_lscale': [ $( printf "%s, " "${SPP_LSCALE[@]}" ) ], - 'spp_prt_list': [ $( printf "%s, " "${SPP_MAG_LIST[@]}" ) ], - 'spp_sigtop1': [ $( printf "%s, " "${SPP_SIGTOP1[@]}" ) ], - 'spp_sigtop2': [ $( printf "%s, " "${SPP_SIGTOP2[@]}" ) ], - 'spp_stddev_cutoff': [ $( printf "%s, " "${SPP_STDDEV_CUTOFF[@]}" ) ], - 'spp_tau': [ $( printf "%s, " "${SPP_TSCALE[@]}" ) ], - 'spp_var_list': [ $( printf "%s, " "${SPP_VAR_LIST[@]}" ) ]," -fi -settings="$settings - }" -# -# Add the relevant LSM SPP namelist variables to "settings" when running with -# LSM SPP turned on. 
-# -settings="$settings -'nam_sfcperts': {" -if [ "${DO_LSM_SPP}" = "TRUE" ]; then - settings="$settings - 'lndp_type': ${LNDP_TYPE}, - 'lndp_model_type': ${LNDP_MODEL_TYPE}, - 'lndp_tau': [ $( printf "%s, " "${LSM_SPP_TSCALE[@]}" ) ], - 'lndp_lscale': [ $( printf "%s, " "${LSM_SPP_LSCALE[@]}" ) ], - 'iseed_lndp': [ $( printf "%s, " "${ISEED_LSM_SPP[@]}" ) ], - 'lndp_var_list': [ $( printf "%s, " "${LSM_SPP_VAR_LIST[@]}" ) ], - 'lndp_prt_list': [ $( printf "%s, " "${LSM_SPP_MAG_LIST[@]}" ) ]," -fi -settings="$settings - }" -print_info_msg $VERBOSE " -The variable \"settings\" specifying values of the weather model's -namelist variables has been set as follows: - -settings = -$settings" -# -#----------------------------------------------------------------------- -# -# Call the set_namelist.py script to create a new FV3 namelist file (full -# path specified by FV3_NML_FP) using the file FV3_NML_BASE_SUITE_FP as -# the base (i.e. starting) namelist file, with physics-suite-dependent -# modifications to the base file specified in the yaml configuration file -# FV3_NML_YAML_CONFIG_FP (for the physics suite specified by CCPP_PHYS_SUITE), -# and with additional physics-suite-independent modificaitons specified -# in the variable "settings" set above. -# -#----------------------------------------------------------------------- -# -$USHDIR/set_namelist.py -q \ - -n ${FV3_NML_BASE_SUITE_FP} \ - -c ${FV3_NML_YAML_CONFIG_FP} ${CCPP_PHYS_SUITE} \ - -u "$settings" \ - -o ${FV3_NML_FP} || \ - print_err_msg_exit "\ -Call to python script set_namelist.py to generate an FV3 namelist file -failed. 
Parameters passed to this script are: - Full path to base namelist file: - FV3_NML_BASE_SUITE_FP = \"${FV3_NML_BASE_SUITE_FP}\" - Full path to yaml configuration file for various physics suites: - FV3_NML_YAML_CONFIG_FP = \"${FV3_NML_YAML_CONFIG_FP}\" - Physics suite to extract from yaml configuration file: - CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" - Full path to output namelist file: - FV3_NML_FP = \"${FV3_NML_FP}\" - Namelist settings specified on command line: - settings = -$settings" -# -# If not running the MAKE_GRID_TN task (which implies the workflow will -# use pregenerated grid files), set the namelist variables specifying -# the paths to surface climatology files. These files are located in -# (or have symlinks that point to them) in the FIXLAM directory. -# -# Note that if running the MAKE_GRID_TN task, this action usually cannot -# be performed here but must be performed in that task because the names -# of the surface climatology files depend on the CRES parameter (which is -# the C-resolution of the grid), and this parameter is in most workflow -# configurations is not known until the grid is created. -# -if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - - set_FV3nml_sfc_climo_filenames || print_err_msg_exit "\ -Call to function to set surface climatology file names in the FV3 namelist -file failed." - -fi -# -#----------------------------------------------------------------------- -# -# To have a record of how this experiment/workflow was generated, copy -# the experiment/workflow configuration file to the experiment directo- -# ry. -# -#----------------------------------------------------------------------- -# -cp_vrfy $USHDIR/${EXPT_CONFIG_FN} $EXPTDIR -# -#----------------------------------------------------------------------- -# -# For convenience, print out the commands that need to be issued on the -# command line in order to launch the workflow and to check its status. 
-# Also, print out the line that should be placed in the user's cron table -# in order for the workflow to be continually resubmitted. -# -#----------------------------------------------------------------------- -# -if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then - wflow_db_fn="${WFLOW_XML_FN%.xml}.db" - rocotorun_cmd="rocotorun -w ${WFLOW_XML_FN} -d ${wflow_db_fn} -v 10" - rocotostat_cmd="rocotostat -w ${WFLOW_XML_FN} -d ${wflow_db_fn} -v 10" -fi - -print_info_msg " -======================================================================== -======================================================================== - -Experiment generation completed. The experiment directory is: - - EXPTDIR=\"$EXPTDIR\" - -======================================================================== -======================================================================== -" -# -#----------------------------------------------------------------------- -# -# If rocoto is required, print instructions on how to load and use it -# -#----------------------------------------------------------------------- -# -if [ "${WORKFLOW_MANAGER}" = "rocoto" ]; then - - print_info_msg "\ -To launch the workflow, first ensure that you have a compatible version -of rocoto available. For most pre-configured platforms, rocoto can be -loaded via a module: - - > module load rocoto - -For more details on rocoto, see the User's Guide. - -To launch the workflow, first ensure that you have a compatible version -of rocoto loaded. For example, to load version 1.3.1 of rocoto, use - - > module load rocoto/1.3.1 - -(This version has been tested on hera; later versions may also work but -have not been tested.) 
- -To launch the workflow, change location to the experiment directory -(EXPTDIR) and issue the rocotrun command, as follows: - - > cd $EXPTDIR - > ${rocotorun_cmd} - -To check on the status of the workflow, issue the rocotostat command -(also from the experiment directory): - - > ${rocotostat_cmd} - -Note that: - -1) The rocotorun command must be issued after the completion of each - task in the workflow in order for the workflow to submit the next - task(s) to the queue. - -2) In order for the output of the rocotostat command to be up-to-date, - the rocotorun command must be issued immediately before issuing the - rocotostat command. - -For automatic resubmission of the workflow (say every 3 minutes), the -following line can be added to the user's crontab (use \"crontab -e\" to -edit the cron table): - -*/3 * * * * cd $EXPTDIR && ./launch_FV3LAM_wflow.sh called_from_cron=\"TRUE\" -" - -fi -# -# If necessary, run the NOMADS script to source external model data. -# -if [ "${NOMADS}" = "TRUE" ]; then - echo "Getting NOMADS online data" - echo "NOMADS_file_type=" $NOMADS_file_type - cd $EXPTDIR - $USHDIR/NOMADS_get_extrn_mdl_files.sh $DATE_FIRST_CYCL $CYCL_HRS $NOMADS_file_type $FCST_LEN_HRS $LBC_SPEC_INTVL_HRS -fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - -} - -# -#----------------------------------------------------------------------- -# -# Start of the script that will call the experiment/workflow generation -# function defined above. 
-# -#----------------------------------------------------------------------- -# -set -u -#set -x -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -if [[ $(uname -s) == Darwin ]]; then - command -v greadlink >/dev/null 2>&1 || { echo >&2 "For Darwin-based operating systems (MacOS), the 'greadlink' utility is required to run the UFS SRW Application. Reference the User's Guide for more information about platform requirements. Aborting."; exit 1; } - scrfunc_fp=$( greadlink -f "${BASH_SOURCE[0]}" ) -else - scrfunc_fp=$( readlink -f "${BASH_SOURCE[0]}" ) -fi -scrfunc_fn=$( basename "${scrfunc_fp}" ) -scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Set directories. -# -#----------------------------------------------------------------------- -# -ushdir="${scrfunc_dir}" -# -# Set the name of and full path to the temporary file in which we will -# save some experiment/workflow variables. The need for this temporary -# file is explained below. -# -tmp_fn="tmp" -tmp_fp="$ushdir/${tmp_fn}" -rm -f "${tmp_fp}" -# -# Set the name of and full path to the log file in which the output from -# the experiment/workflow generation function will be saved. -# -log_fn="log.generate_FV3LAM_wflow" -log_fp="$ushdir/${log_fn}" -rm -f "${log_fp}" -# -# Call the generate_FV3LAM_wflow function defined above to generate the -# experiment/workflow. Note that we pipe the output of the function -# (and possibly other commands) to the "tee" command in order to be able -# to both save it to a file and print it out to the screen (stdout). 
-# The piping causes the call to the function (and the other commands -# grouped with it using the curly braces, { ... }) to be executed in a -# subshell. As a result, the experiment/workflow variables that the -# function sets are not available outside of the grouping, i.e. they are -# not available at and after the call to "tee". Since some of these va- -# riables are needed after the call to "tee" below, we save them in a -# temporary file and read them in outside the subshell later below. -# -{ -generate_FV3LAM_wflow 2>&1 # If this exits with an error, the whole {...} group quits, so things don't work... -retval=$? -echo "$EXPTDIR" >> "${tmp_fp}" -echo "$retval" >> "${tmp_fp}" -} | tee "${log_fp}" -# -# Read in experiment/workflow variables needed later below from the tem- -# porary file created in the subshell above containing the call to the -# generate_FV3LAM_wflow function. These variables are not directly -# available here because the call to generate_FV3LAM_wflow above takes -# place in a subshell (due to the fact that we are then piping its out- -# put to the "tee" command). Then remove the temporary file. -# -exptdir=$( sed "1q;d" "${tmp_fp}" ) -retval=$( sed "2q;d" "${tmp_fp}" ) -rm "${tmp_fp}" -# -# If the call to the generate_FV3LAM_wflow function above was success- -# ful, move the log file in which the "tee" command saved the output of -# the function to the experiment directory. -# -if [[ $retval == 0 ]]; then - mv "${log_fp}" "$exptdir" -# -# If the call to the generate_FV3LAM_wflow function above was not suc- -# cessful, print out an error message and exit with a nonzero return -# code. -# -else - printf " -Experiment generation failed. Check the log file from the experiment -generation script in the file specified by log_fp: - log_fp = \"${log_fp}\" -Stopping. 
-" - exit 1 -fi diff --git a/ush/get_crontab_contents.py b/ush/get_crontab_contents.py index cbb434c69e..3f885ad978 100644 --- a/ush/get_crontab_contents.py +++ b/ush/get_crontab_contents.py @@ -1,12 +1,13 @@ #!/usr/bin/env python3 import os +import sys import unittest +import argparse from datetime import datetime from python_utils import import_vars, set_env_var, print_input_args, \ - run_command, define_macos_utilities, check_var_valid_value -from constants import valid_vals_BOOLEAN + run_command, define_macos_utilities, print_info_msg def get_crontab_contents(called_from_cron): """ @@ -36,15 +37,10 @@ def get_crontab_contents(called_from_cron): """ print_input_args(locals()) - - #import all env vars + + #import selected env vars IMPORTS = ["MACHINE", "USER"] import_vars(env_vars=IMPORTS) - - # - # Make sure called_from_cron is set to a valid value. - # - check_var_valid_value(called_from_cron, valid_vals_BOOLEAN) if MACHINE == "WCOSS_DELL_P3": __crontab_cmd__="" @@ -62,17 +58,115 @@ def get_crontab_contents(called_from_cron): if called_from_cron: __crontab_cmd__="/usr/bin/crontab" (_,__crontab_contents__,_)=run_command(f'''{__crontab_cmd__} -l''') + + return __crontab_cmd__, __crontab_contents__ + +def add_crontab_line(): + """ Add crontab line to cron table """ + + #import selected env vars + IMPORTS = ["MACHINE", "USER", "CRONTAB_LINE", "VERBOSE", "EXPTDIR"] + import_vars(env_vars=IMPORTS) + # - # On Cheyenne, the output of the "crontab -l" command contains a 3-line - # header (comments) at the top that is not actually part of the user's - # cron table. This needs to be removed to avoid adding an unnecessary - # copy of this header to the user's cron table. + # Make a backup copy of the user's crontab file and save it in a file. 
# - if MACHINE == "CHEYENNE": - (_,__crontab_contents__,_)=run_command(f'''printf "%s" "{__crontab_contents__}" | tail -n +4 ''') + time_stamp = datetime.now().strftime("%F_%T") + crontab_backup_fp=os.path.join(EXPTDIR,f"crontab.bak.{time_stamp}") + print_info_msg(f''' + Copying contents of user cron table to backup file: + crontab_backup_fp = \"{crontab_backup_fp}\"''',verbose=VERBOSE) + + global called_from_cron + try: called_from_cron + except: called_from_cron = False + + # Get crontab contents + crontab_cmd,crontab_contents = get_crontab_contents(called_from_cron=called_from_cron) + + # Create backup + run_command(f'''printf "%s" '{crontab_contents}' > "{crontab_backup_fp}"''') + + # Add crontab line + if CRONTAB_LINE in crontab_contents: + + print_info_msg(f''' + The following line already exists in the cron table and thus will not be + added: + CRONTAB_LINE = \"{CRONTAB_LINE}\"''') + + else: + + print_info_msg(f''' + Adding the following line to the user's cron table in order to automatically + resubmit SRW workflow: + CRONTAB_LINE = \"{CRONTAB_LINE}\"''',verbose=VERBOSE) + + #add new line to crontab contents if it doesn't have one + NEWLINE_CHAR="" + if crontab_contents and crontab_contents[-1] != "\n": + NEWLINE_CHAR="\n" + + #add the crontab line + if MACHINE == "WCOSS_DELL_P3": + run_command(f'''printf "%b%s\n" '{NEWLINE_CHAR}' '{CRONTAB_LINE}' >> "/u/{USER}/cron/mycrontab"''') + else: + run_command(f'''printf "%s%b%s\n" '{crontab_contents}' '{NEWLINE_CHAR}' '{CRONTAB_LINE}' | {crontab_cmd}''') + +def delete_crontab_line(called_from_cron): + """ Delete crontab line after job is complete i.e. either SUCCESS/FAILURE + but not IN PROGRESS status""" + + print_input_args(locals()) - return __crontab_cmd__, __crontab_contents__ + #import selected env vars + IMPORTS = ["MACHINE", "USER", "CRONTAB_LINE"] + import_vars(env_vars=IMPORTS) + + # + # Get the full contents of the user's cron table. 
+ # + (crontab_cmd,crontab_contents) = get_crontab_contents(called_from_cron) + # + # Remove the line in the contents of the cron table corresponding to the + # current forecast experiment (if that line is part of the contents). + # Then record the results back into the user's cron table. + # + if (CRONTAB_LINE + '\n') in crontab_contents: + crontab_contents = crontab_contents.replace(CRONTAB_LINE+'\n','') + else: + crontab_contents = crontab_contents.replace(CRONTAB_LINE,'') + + if MACHINE == "WCOSS_DELL_P3": + run_command(f'''echo '{crontab_contents}' > "/u/{USER}/cron/mycrontab"''') + else: + run_command(f'''echo '{crontab_contents}' | {crontab_cmd}''') + +def parse_args(argv): + """ Parse command line arguments for deleting crontab line. + This is needed because it is called from a shell script + """ + parser = argparse.ArgumentParser( + description='Crontab job manipulation program.' + ) + + parser.add_argument('-d', '--delete', + dest='delete', + action='store_true', + help='Delete crontab line.') + + parser.add_argument('-c', '--called-from-cron', + dest='called_from_cron', + action='store_true', + help='Called from cron.') + + return parser.parse_args(argv) +if __name__ == '__main__': + args = parse_args(sys.argv[1:]) + if args.delete: + delete_crontab_line(args.called_from_cron) + class Testing(unittest.TestCase): def test_get_crontab_contents(self): crontab_cmd,crontab_contents = get_crontab_contents(called_from_cron=True) diff --git a/ush/get_crontab_contents.sh b/ush/get_crontab_contents.sh deleted file mode 100644 index a7a00854ef..0000000000 --- a/ush/get_crontab_contents.sh +++ /dev/null @@ -1,74 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that returns the contents of the user's -# cron table as well as the command to use to manipulate the cron table -# (i.e. 
the "crontab" command, but on some platforms the version or -# location of this may change depending on other circumstances, e.g. on -# Cheyenne, this depends on whether a script that wants to call "crontab" -# is itself being called from a cron job). Arguments are as follows: -# -# called_from_cron: -# Boolean flag that specifies whether this function (and the scripts or -# functions that are calling it) are called as part of a cron job. Must -# be set to "TRUE" or "FALSE". -# -# outvarname_crontab_cmd: -# Name of the output variable that will contain the command to issue for -# the system "crontab" command. -# -# outvarname_crontab_contents: -# Name of the output variable that will contain the contents of the -# user's cron table. -# -#----------------------------------------------------------------------- -# -function get_crontab_contents() { - - { save_shell_opts; set -u +x; } > /dev/null 2>&1 - - local valid_args=( \ - "called_from_cron" \ - "outvarname_crontab_cmd" \ - "outvarname_crontab_contents" \ - ) - process_args valid_args "$@" - print_input_args "valid_args" - - local __crontab_cmd__ \ - __crontab_contents__ - # - # Make sure called_from_cron is set to a valid value. - # - source $USHDIR/constants.sh - check_var_valid_value "called_from_cron" "valid_vals_BOOLEAN" - called_from_cron=$(boolify "${called_from_cron}") - - if [ "$MACHINE" = "WCOSS_DELL_P3" ]; then - __crontab_cmd__="" - __crontab_contents__=$( cat "/u/$USER/cron/mycrontab" ) - else - __crontab_cmd__="crontab" - # - # On Cheyenne, simply typing "crontab" will launch the crontab command - # at "/glade/u/apps/ch/opt/usr/bin/crontab". This is a containerized - # version of crontab that will work if called from scripts that are - # themselves being called as cron jobs. In that case, we must instead - # call the system version of crontab at /usr/bin/crontab. 
- # - if [ "$MACHINE" = "CHEYENNE" ]; then - if [ -n "${called_from_cron}" ] && [ "${called_from_cron}" = "TRUE" ]; then - __crontab_cmd__="/usr/bin/crontab" - fi - fi - __crontab_contents__=$( ${__crontab_cmd__} -l ) - fi - # - # Set output variables. - # - printf -v ${outvarname_crontab_cmd} "%s" "${__crontab_cmd__}" - printf -v ${outvarname_crontab_contents} "%s" "${__crontab_contents__}" - - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/launch_FV3LAM_wflow.sh b/ush/launch_FV3LAM_wflow.sh index 8a4dbe6031..17734b9477 100755 --- a/ush/launch_FV3LAM_wflow.sh +++ b/ush/launch_FV3LAM_wflow.sh @@ -99,7 +99,6 @@ fi . $USHDIR/constants.sh . $USHDIR/source_util_funcs.sh . $USHDIR/init_env.sh -. $USHDIR/get_crontab_contents.sh # #----------------------------------------------------------------------- # @@ -393,35 +392,14 @@ script for this experiment: CRONTAB_LINE = \"${CRONTAB_LINE}\" " # -# Below, we use "grep" to determine whether the crontab line that the -# variable CRONTAB_LINE contains is already present in the cron table. -# For that purpose, we need to escape the asterisks in the string in -# CRONTAB_LINE with backslashes. Do this next. +# Remove CRONTAB_LINE from cron table # - crontab_line_esc_astr=$( printf "%s" "${CRONTAB_LINE}" | \ - $SED -r -e "s%[*]%\\\\*%g" ) -# -# Get the full contents of the user's cron table. -# - get_crontab_contents called_from_cron=${called_from_cron} \ - outvarname_crontab_cmd="crontab_cmd" \ - outvarname_crontab_contents="crontab_contents" -# -# Remove the line in the contents of the cron table corresponding to the -# current forecast experiment (if that line is part of the contents). -# Then record the results back into the user's cron table. 
-# -# In the string passed to the grep command below, we use the line start -# and line end anchors ("^" and "$", respectively) to ensure that we -# only find lines in the crontab that contain exactly the string in -# crontab_line_esc_astr without any leading or trailing characters. -# - crontab_contents=$( echo "${crontab_contents}" | grep -v "^${crontab_line_esc_astr}$" ) - - if [ "$MACHINE" = "WCOSS_DELL_P3" ]; then - echo "${crontab_contents}" > "/u/$USER/cron/mycrontab" + if [ "${called_from_cron}" = "TRUE" ]; then + MACHINE=$MACHINE CRONTAB_LINE=$CRONTAB_LINE \ + python3 $USHDIR/get_crontab_contents.py --delete --called-from-cron else - echo "${crontab_contents}" | ${crontab_cmd} + MACHINE=$MACHINE CRONTAB_LINE=$CRONTAB_LINE \ + python3 $USHDIR/get_crontab_contents.py --delete fi fi diff --git a/ush/link_fix.py b/ush/link_fix.py old mode 100644 new mode 100755 index 9788a4ad47..56fccc8567 --- a/ush/link_fix.py +++ b/ush/link_fix.py @@ -2,12 +2,14 @@ import unittest import os +import sys +import argparse import glob from python_utils import import_vars, set_env_var, print_input_args, \ print_info_msg, print_err_msg_exit, create_symlink_to_file, \ define_macos_utilities, check_var_valid_value, \ - cd_vrfy, mkdir_vrfy, find_pattern_in_str + cd_vrfy, mkdir_vrfy, find_pattern_in_str, load_shell_config def link_fix(verbose, file_group): """ This file defines a function that ... @@ -300,28 +302,10 @@ def link_fix(verbose, file_group): #----------------------------------------------------------------------- # if file_group == "grid": - target=f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH4}.nc" symlink=f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.nc" create_symlink_to_file(target,symlink,True) # - # The surface climatology file generation code looks for a grid file - # having a name of the form "C${GFDLgrid_RES}_grid.tile7.halo4.nc" (i.e. 
- # the C-resolution used in the name of this file is the number of grid - # points per horizontal direction per tile, just like in the global model). - # Thus, if we are running the MAKE_SFC_CLIMO_TN task, if the grid is of - # GFDLgrid type, and if we are not using GFDLgrid_RES in filenames (i.e. - # we are using the equivalent global uniform grid resolution instead), - # then create a link whose name uses the GFDLgrid_RES that points to the - # link whose name uses the equivalent global uniform resolution. - # - if RUN_TASK_MAKE_SFC_CLIMO and \ - GRID_GEN_METHOD == "GFDLgrid" and \ - not GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES: - target=f"{cres}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.halo{NH4}.nc" - symlink=f"C{GFDLgrid_RES}{DOT_OR_USCORE}grid.tile{TILE_RGNL}.nc" - create_symlink_to_file(target,symlink,relative) - # #----------------------------------------------------------------------- # # If considering surface climatology files, create symlinks to the surface @@ -366,6 +350,30 @@ def link_fix(verbose, file_group): return res +def parse_args(argv): + """ Parse command line arguments""" + parser = argparse.ArgumentParser( + description='Creates symbolic links to FIX directories.' 
+ ) + + parser.add_argument('-f', '--file-group', + dest='file_group', + required=True, + help='File group, could be one of ["grid", "orog", "sfc_climo"].') + + parser.add_argument('-p', '--path-to-defns', + dest='path_to_defns', + required=True, + help='Path to var_defns file.') + + return parser.parse_args(argv) + +if __name__ == '__main__': + args = parse_args(sys.argv[1:]) + cfg = load_shell_config(args.path_to_defns) + import_vars(dictionary=cfg) + link_fix(VERBOSE, args.file_group) + class Testing(unittest.TestCase): def test_link_fix(self): res = link_fix(verbose=True, file_group="grid") diff --git a/ush/link_fix.sh b/ush/link_fix.sh deleted file mode 100755 index 48bf0ca3f8..0000000000 --- a/ush/link_fix.sh +++ /dev/null @@ -1,460 +0,0 @@ -#!/bin/bash - -# -#----------------------------------------------------------------------- -# -# This file defines a function that ... -# -#----------------------------------------------------------------------- -# -function link_fix() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. 
-# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names that this script/function can -# accept. Then process the arguments provided to it (which should con- -# sist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ -"verbose" \ -"file_group" \ -"output_varname_res_in_filenames" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local valid_vals_verbose \ - valid_vals_file_group \ - fns \ - fps \ - run_task \ - sfc_climo_fields \ - num_fields \ - i \ - ii \ - res_prev \ - res \ - fp_prev \ - fp \ - fn \ - relative_link_flag \ - cres \ - tmp \ - fns_sfc_climo_with_halo_in_fn \ - fns_sfc_climo_no_halo_in_fn \ - target \ - symlink -# -#----------------------------------------------------------------------- -# -# Set the valid values that various input arguments can take on and then -# ensure that the values passed in are one of these valid values. 
-# -#----------------------------------------------------------------------- -# - valid_vals_verbose=( "TRUE" "FALSE" ) - check_var_valid_value "verbose" "valid_vals_verbose" - - valid_vals_file_group=( "grid" "orog" "sfc_climo" ) - check_var_valid_value "file_group" "valid_vals_file_group" -# -#----------------------------------------------------------------------- -# -# Create symlinks in the FIXLAM directory pointing to the grid files. -# These symlinks are needed by the make_orog, make_sfc_climo, make_ic, -# make_lbc, and/or run_fcst tasks. -# -# Note that we check that each target file exists before attempting to -# create symlinks. This is because the "ln" command will create sym- -# links to non-existent targets without returning with a nonzero exit -# code. -# -#----------------------------------------------------------------------- -# - print_info_msg "$verbose" " -Creating links in the FIXLAM directory to the grid files..." -# -#----------------------------------------------------------------------- -# -# Create globbing patterns for grid, orography, and surface climatology -# files. -# -# -# For grid files (i.e. file_group set to "grid"), symlinks are created -# in the FIXLAM directory to files (of the same names) in the GRID_DIR. -# These symlinks/files and the reason each is needed is listed below: -# -# 1) "C*.mosaic.halo${NHW}.nc" -# This mosaic file for the wide-halo grid (i.e. the grid with a ${NHW}- -# cell-wide halo) is needed as an input to the orography filtering -# executable in the orography generation task. The filtering code -# extracts from this mosaic file the name of the file containing the -# grid on which it will generate filtered topography. Note that the -# orography generation and filtering are both performed on the wide- -# halo grid. The filtered orography file on the wide-halo grid is then -# shaved down to obtain the filtered orography files with ${NH3}- and -# ${NH4}-cell-wide halos. 
-# -# The raw orography generation step in the make_orog task requires the -# following symlinks/files: -# -# a) C*.mosaic.halo${NHW}.nc -# The script for the make_orog task extracts the name of the grid -# file from this mosaic file; this name should be -# "C*.grid.tile${TILE_RGNL}.halo${NHW}.nc". -# -# b) C*.grid.tile${TILE_RGNL}.halo${NHW}.nc -# This is the -# The script for the make_orog task passes the name of the grid -# file (extracted above from the mosaic file) to the orography -# generation executable. The executable then -# reads in this grid file and generates a raw orography -# file on the grid. The raw orography file is initially renamed "out.oro.nc", -# but for clarity, it is then renamed "C*.raw_orog.tile${TILE_RGNL}.halo${NHW}.nc". -# -# c) The fixed files thirty.second.antarctic.new.bin, landcover30.fixed, -# and gmted2010.30sec.int. -# -# The orography filtering step in the make_orog task requires the -# following symlinks/files: -# -# a) C*.mosaic.halo${NHW}.nc -# This is the mosaic file for the wide-halo grid. The orography -# filtering executable extracts from this file the name of the grid -# file containing the wide-halo grid (which should be -# "${CRES}.grid.tile${TILE_RGNL}.halo${NHW}.nc"). The executable then -# looks for this grid file IN THE DIRECTORY IN WHICH IT IS RUNNING. -# Thus, before running the executable, the script creates a symlink in this run directory that -# points to the location of the actual wide-halo grid file. -# -# b) C*.raw_orog.tile${TILE_RGNL}.halo${NHW}.nc -# This is the raw orography file on the wide-halo grid. The script -# for the make_orog task copies this file to a new file named -# "C*.filtered_orog.tile${TILE_RGNL}.halo${NHW}.nc" that will be -# used as input to the orography filtering executable. The executable -# will then overwrite the contents of this file with the filtered orography. 
-# Thus, the output of the orography filtering executable will be -# the file C*.filtered_orog.tile${TILE_RGNL}.halo${NHW}.nc. -# -# The shaving step in the make_orog task requires the following: -# -# a) C*.filtered_orog.tile${TILE_RGNL}.halo${NHW}.nc -# This is the filtered orography file on the wide-halo grid. -# This gets shaved down to two different files: -# -# i) ${CRES}.oro_data.tile${TILE_RGNL}.halo${NH0}.nc -# This is the filtered orography file on the halo-0 grid. -# -# ii) ${CRES}.oro_data.tile${TILE_RGNL}.halo${NH4}.nc -# This is the filtered orography file on the halo-4 grid. -# -# Note that the file names of the shaved files differ from that of -# the initial unshaved file on the wide-halo grid in that the field -# after ${CRES} is now "oro_data" (not "filtered_orog") to comply -# with the naming convention used more generally. -# -# 2) "C*.mosaic.halo${NH4}.nc" -# This mosaic file for the grid with a 4-cell-wide halo is needed as -# an input to the surface climatology generation executable. The -# surface climatology generation code reads from this file the number -# of tiles (which should be 1 for a regional grid) and the tile names. -# More importantly, using the ESMF function ESMF_GridCreateMosaic(), -# it creates a data object of type esmf_grid; the grid information -# in this object is obtained from the grid file specified in the mosaic -# file, which should be "C*.grid.tile${TILE_RGNL}.halo${NH4}.nc". The -# dimensions specified in this grid file must match the ones specified -# in the (filtered) orography file "C*.oro_data.tile${TILE_RGNL}.halo${NH4}.nc" -# that is also an input to the surface climatology generation executable. -# If they do not, then the executable will crash with an ESMF library -# error (something like "Arguments are incompatible"). 
-# -# Thus, for the make_sfc_climo task, the following symlinks/files must -# exist: -# a) "C*.mosaic.halo${NH4}.nc" -# b) "C*.grid.tile${TILE_RGNL}.halo${NH4}.nc" -# c) "C*.oro_data.tile${TILE_RGNL}.halo${NH4}.nc" -# -# 3) -# -# -#----------------------------------------------------------------------- -# - case "${file_group}" in -# - "grid") - fns=( \ - "C*${DOT_OR_USCORE}mosaic.halo${NHW}.nc" \ - "C*${DOT_OR_USCORE}mosaic.halo${NH4}.nc" \ - "C*${DOT_OR_USCORE}mosaic.halo${NH3}.nc" \ - "C*${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NHW}.nc" \ - "C*${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH3}.nc" \ - "C*${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH4}.nc" \ - ) - fps=( "${fns[@]/#/${GRID_DIR}/}" ) - run_task="${RUN_TASK_MAKE_GRID}" - ;; -# - "orog") - fns=( \ - "C*${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH0}.nc" \ - "C*${DOT_OR_USCORE}oro_data.tile${TILE_RGNL}.halo${NH4}.nc" \ - ) - if [ "${CCPP_PHYS_SUITE}" = "FV3_HRRR" ]; then - fns+=( \ - "C*${DOT_OR_USCORE}oro_data_ss.tile${TILE_RGNL}.halo${NH0}.nc" \ - "C*${DOT_OR_USCORE}oro_data_ls.tile${TILE_RGNL}.halo${NH0}.nc" \ - ) - fi - fps=( "${fns[@]/#/${OROG_DIR}/}" ) - run_task="${RUN_TASK_MAKE_OROG}" - ;; -# -# The following list of symlinks (which have the same names as their -# target files) need to be created made in order for the make_ics and -# make_lbcs tasks (i.e. tasks involving chgres_cube) to work. -# - "sfc_climo") - num_fields=${#SFC_CLIMO_FIELDS[@]} - fns=() - for (( i=0; i<${num_fields}; i++ )); do - ii=$((2*i)) - fns[$ii]="C*.${SFC_CLIMO_FIELDS[$i]}.tile${TILE_RGNL}.halo${NH0}.nc" - fns[$ii+1]="C*.${SFC_CLIMO_FIELDS[$i]}.tile${TILE_RGNL}.halo${NH4}.nc" - done - fps=( "${fns[@]/#/${SFC_CLIMO_DIR}/}" ) - run_task="${RUN_TASK_MAKE_SFC_CLIMO}" - ;; -# - esac -# -#----------------------------------------------------------------------- -# -# Find all files matching the globbing patterns and make sure that they -# all have the same resolution (an integer) in their names. 
-# -#----------------------------------------------------------------------- -# - i=0 - res_prev="" - res="" - fp_prev="" - - for fp in ${fps[@]}; do - - fn=$( basename $fp ) - - res=$( printf "%s" $fn | $SED -n -r -e "s/^C([0-9]*).*/\1/p" ) - if [ -z $res ]; then - print_err_msg_exit "\ -The resolution could not be extracted from the current file's name. The -full path to the file (fp) is: - fp = \"${fp}\" -This may be because fp contains the * globbing character, which would -imply that no files were found that match the globbing pattern specified -in fp." - fi - - if [ $i -gt 0 ] && [ ${res} != ${res_prev} ]; then - print_err_msg_exit "\ -The resolutions (as obtained from the file names) of the previous and -current file (fp_prev and fp, respectively) are different: - fp_prev = \"${fp_prev}\" - fp = \"${fp}\" -Please ensure that all files have the same resolution." - fi - - i=$((i+1)) - fp_prev="$fp" - res_prev=${res} - - done -# -#----------------------------------------------------------------------- -# -# If the output variable name is not set to a null string, set it. This -# variable is just the resolution extracted from the file names in the -# specified file group. Note that if the output variable name is not -# specified in the call to this function, the process_args function will -# set it to a null string, in which case no output variable will be set. -# -#----------------------------------------------------------------------- -# - if [ ! -z "${output_varname_res_in_filenames}" ]; then - eval ${output_varname_res_in_filenames}="$res" - fi -# -#----------------------------------------------------------------------- -# -# Replace the * globbing character in the set of globbing patterns with -# the resolution. This will result in a set of (full paths to) specific -# files. 
-# -#----------------------------------------------------------------------- -# - fps=( "${fps[@]/\*/$res}" ) -# -#----------------------------------------------------------------------- -# -# In creating the various symlinks below, it is convenient to work in -# the FIXLAM directory. We will change directory back to the original -# later below. -# -#----------------------------------------------------------------------- -# - cd_vrfy "$FIXLAM" -# -#----------------------------------------------------------------------- -# -# Use the set of full file paths generated above as the link targets to -# create symlinks to these files in the FIXLAM directory. -# -#----------------------------------------------------------------------- -# -# If the task in consideration (which will be one of the pre-processing -# tasks MAKE_GRID_TN, MAKE_OROG_TN, and MAKE_SFC_CLIMO_TN) was run, then -# the target files will be located under the experiment directory. In -# this case, we use relative symlinks in order the experiment directory -# more portable and the symlinks more readable. However, if the task -# was not run, then pregenerated grid, orography, or surface climatology -# files will be used, and those will be located in an arbitrary directory -# (specified by the user) that is somwehere outside the experiment -# directory. Thus, in this case, there isn't really an advantage to using -# relative symlinks, so we use symlinks with absolute paths. -# - if [ "${run_task}" = "TRUE" ]; then - relative_link_flag="TRUE" - else - relative_link_flag="FALSE" - fi - - for fp in "${fps[@]}"; do - fn=$( basename $fp ) - create_symlink_to_file target="$fp" symlink="$fn" \ - relative="${relative_link_flag}" - done -# -#----------------------------------------------------------------------- -# -# Set the C-resolution based on the resolution appearing in the file -# names. 
-# -#----------------------------------------------------------------------- -# - cres="C$res" -# -#----------------------------------------------------------------------- -# -# If considering grid files, create a symlink to the halo4 grid file -# that does not contain the halo size in its name. This is needed by -# the tasks that generate the initial and lateral boundary condition -# files. -# -#----------------------------------------------------------------------- -# - if [ "${file_group}" = "grid" ]; then - target="${cres}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.halo${NH4}.nc" - symlink="${cres}${DOT_OR_USCORE}grid.tile${TILE_RGNL}.nc" - create_symlink_to_file target="$target" symlink="$symlink" relative="TRUE" - fi -# -#----------------------------------------------------------------------- -# -# If considering surface climatology files, create symlinks to the surface -# climatology files that do not contain the halo size in their names. -# These are needed by the task that generates the initial condition files. -# -#----------------------------------------------------------------------- -# - if [ "${file_group}" = "sfc_climo" ]; then - - tmp=( "${SFC_CLIMO_FIELDS[@]/#/${cres}.}" ) - fns_sfc_climo_with_halo_in_fn=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH4}.nc}" ) - fns_sfc_climo_no_halo_in_fn=( "${tmp[@]/%/.tile${TILE_RGNL}.nc}" ) - - for (( i=0; i<${num_fields}; i++ )); do - target="${fns_sfc_climo_with_halo_in_fn[$i]}" - symlink="${fns_sfc_climo_no_halo_in_fn[$i]}" - create_symlink_to_file target="$target" symlink="$symlink" relative="TRUE" - done -# -# In order to be able to specify the surface climatology file names in -# the forecast model's namelist file, in the FIXLAM directory a symlink -# must be created for each surface climatology field that has "tile1" in -# its name (and no "halo") and which points to the corresponding "tile7.halo0" -# file. 
-# - tmp=( "${SFC_CLIMO_FIELDS[@]/#/${cres}.}" ) - fns_sfc_climo_tile7_halo0_in_fn=( "${tmp[@]/%/.tile${TILE_RGNL}.halo${NH0}.nc}" ) - fns_sfc_climo_tile1_no_halo_in_fn=( "${tmp[@]/%/.tile1.nc}" ) - - for (( i=0; i<${num_fields}; i++ )); do - target="${fns_sfc_climo_tile7_halo0_in_fn[$i]}" - symlink="${fns_sfc_climo_tile1_no_halo_in_fn[$i]}" - create_symlink_to_file target="$target" symlink="$symlink" relative="TRUE" - done - - fi -# -#----------------------------------------------------------------------- -# -# Change directory back to original one. -# -#----------------------------------------------------------------------- -# - cd_vrfy - -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the start of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/predef_grid_params.yaml b/ush/predef_grid_params.yaml index 330a9a6472..5b3849fdd0 100644 --- a/ush/predef_grid_params.yaml +++ b/ush/predef_grid_params.yaml @@ -187,41 +187,6 @@ # #----------------------------------------------------------------------- # -# The RRFS CONUS domain with ~13km cells. 
-# -#----------------------------------------------------------------------- -# -"RRFS_CONUS_13km": - GRID_GEN_METHOD: "ESGgrid" - ESGgrid_LON_CTR: -97.5 - ESGgrid_LAT_CTR: 38.5 - ESGgrid_DELX: 13000.0 - ESGgrid_DELY: 13000.0 - ESGgrid_NX: 396 - ESGgrid_NY: 232 - ESGgrid_PAZI: 0.0 - ESGgrid_WIDE_HALO_WIDTH: 6 - DT_ATMOS: 45 - LAYOUT_X: 16 - LAYOUT_Y: 10 - BLOCKSIZE: 32 - #if QUILTING = True - WRTCMP_write_groups: 1 - WRTCMP_write_tasks_per_group: 10 - WRTCMP_output_grid: "lambert_conformal" - WRTCMP_cen_lon: -97.5 - WRTCMP_cen_lat: 38.5 - WRTCMP_stdlat1: 38.5 - WRTCMP_stdlat2: 38.5 - WRTCMP_nx: 393 - WRTCMP_ny: 225 - WRTCMP_lon_lwr_left: -121.70231097 - WRTCMP_lat_lwr_left: 22.57417972 - WRTCMP_dx: 13000.0 - WRTCMP_dy: 13000.0 -# -#----------------------------------------------------------------------- -# # The RRFS CONUS domain with ~3km cells. # #----------------------------------------------------------------------- @@ -496,7 +461,7 @@ GFDLgrid_LON_T6_CTR: -97.5 GFDLgrid_LAT_T6_CTR: 38.5 GFDLgrid_STRETCH_FAC: 1.4 - GFDLgrid_RES: 96 + GFDLgrid_NUM_CELLS: 96 GFDLgrid_REFINE_RATIO: 3 num_margin_cells_T6_left: 12 GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: 13 @@ -506,7 +471,7 @@ GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: 17 num_margin_cells_T6_top: 16 GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: 80 - GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES: True + GFDLgrid_USE_NUM_CELLS_IN_FILENAMES: True DT_ATMOS: 225 LAYOUT_X: 6 LAYOUT_Y: 4 @@ -539,7 +504,7 @@ GFDLgrid_LON_T6_CTR: -97.5 GFDLgrid_LAT_T6_CTR: 38.5 GFDLgrid_STRETCH_FAC: 1.5 - GFDLgrid_RES: 768 + GFDLgrid_NUM_CELLS: 768 GFDLgrid_REFINE_RATIO: 3 num_margin_cells_T6_left: 69 GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G: 70 @@ -549,7 +514,7 @@ GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G: 165 num_margin_cells_T6_top: 164 GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G: 604 - GFDLgrid_USE_GFDLgrid_RES_IN_FILENAMES: True + GFDLgrid_USE_NUM_CELLS_IN_FILENAMES: True DT_ATMOS: 18 LAYOUT_X: 30 LAYOUT_Y: 22 diff --git a/ush/python_utils/__init__.py b/ush/python_utils/__init__.py index 
9371488d5c..3aa90e1549 100644 --- a/ush/python_utils/__init__.py +++ b/ush/python_utils/__init__.py @@ -1,22 +1,25 @@ -from .misc import uppercase, lowercase, find_pattern_in_str, find_pattern_in_file -from .check_for_preexist_dir_file import check_for_preexist_dir_file -from .check_var_valid_value import check_var_valid_value -from .count_files import count_files -from .create_symlink_to_file import create_symlink_to_file -from .define_macos_utilities import define_macos_utilities -from .environment import str_to_date, date_to_str, str_to_type, type_to_str, list_to_str, \ - str_to_list, set_env_var, get_env_var, import_vars, export_vars -from .filesys_cmds_vrfy import cmd_vrfy, cp_vrfy, mv_vrfy, rm_vrfy, ln_vrfy, mkdir_vrfy, cd_vrfy -from .get_charvar_from_netcdf import get_charvar_from_netcdf -from .get_elem_inds import get_elem_inds -from .interpol_to_arbit_CRES import interpol_to_arbit_CRES -from .print_input_args import print_input_args -from .print_msg import print_info_msg, print_err_msg_exit -from .process_args import process_args -from .run_command import run_command -from .config_parser import load_yaml_config, cfg_to_yaml_str, \ - load_json_config, cfg_to_json_str, \ - load_ini_config, cfg_to_ini_str, get_ini_value, \ - load_shell_config, cfg_to_shell_str, \ - load_config_file -from .xml_parser import load_xml_file, has_tag_with_value +try: + from .misc import uppercase, lowercase, find_pattern_in_str, find_pattern_in_file, flatten_dict + from .check_for_preexist_dir_file import check_for_preexist_dir_file + from .check_var_valid_value import check_var_valid_value + from .count_files import count_files + from .create_symlink_to_file import create_symlink_to_file + from .define_macos_utilities import define_macos_utilities + from .environment import str_to_date, date_to_str, str_to_type, type_to_str, list_to_str, \ + str_to_list, set_env_var, get_env_var, import_vars, export_vars + from .filesys_cmds_vrfy import cmd_vrfy, cp_vrfy, mv_vrfy, rm_vrfy, 
ln_vrfy, mkdir_vrfy, cd_vrfy + from .get_elem_inds import get_elem_inds + from .interpol_to_arbit_CRES import interpol_to_arbit_CRES + from .print_input_args import print_input_args + from .print_msg import print_info_msg, print_err_msg_exit + from .process_args import process_args + from .run_command import run_command + from .get_charvar_from_netcdf import get_charvar_from_netcdf + from .xml_parser import load_xml_file, has_tag_with_value + from .config_parser import load_shell_config, cfg_to_shell_str, \ + load_json_config, cfg_to_json_str, \ + load_ini_config, cfg_to_ini_str, \ + get_ini_value, load_config_file, \ + load_yaml_config, cfg_to_yaml_str +except: + pass diff --git a/ush/python_utils/config_parser.py b/ush/python_utils/config_parser.py index da7131df26..b343551bfd 100644 --- a/ush/python_utils/config_parser.py +++ b/ush/python_utils/config_parser.py @@ -14,7 +14,10 @@ """ import argparse -import yaml +try: + import yaml +except: + pass import json import sys import os @@ -49,7 +52,10 @@ def join_str(loader, node): seq = loader.construct_sequence(node) return ''.join([str(i) for i in seq]) -yaml.add_constructor('!join_str', join_str, Loader=yaml.SafeLoader) +try: + yaml.add_constructor('!join_str', join_str, Loader=yaml.SafeLoader) +except: + pass ########## # JSON @@ -86,12 +92,15 @@ def load_shell_config(config_file): # Save env vars before and after sourcing the scipt and then # do a diff to get variables specifically defined/updated in the script # Method sounds brittle but seems to work ok so far + pid = os.getpid() code = dedent(f''' #!/bin/bash - (set -o posix; set) > ./_t1 + t1="./t1.{pid}" + t2="./t2.{pid}" + (set -o posix; set) > $t1 {{ . 
{config_file}; set +x; }} &>/dev/null - (set -o posix; set) > ./_t2 - diff ./_t1 ./_t2 | grep "> " | cut -c 3- - rm -rf ./_t1 ./_t2 + (set -o posix; set) > $t2 + diff $t1 $t2 | grep "> " | cut -c 3- + rm -rf $t1 $t2 ''') (_,config_str,_) = run_command(code) lines = config_str.splitlines() @@ -131,7 +140,7 @@ def load_ini_config(config_file): if not os.path.exists(config_file): print_err_msg_exit(f''' The specified configuration file does not exist: - \"{file_name}\"''') + \"{config_file}\"''') config = configparser.ConfigParser() config.read(config_file) diff --git a/ush/python_utils/environment.py b/ush/python_utils/environment.py index 25f03b8fd8..8123130557 100644 --- a/ush/python_utils/environment.py +++ b/ush/python_utils/environment.py @@ -4,25 +4,30 @@ import inspect import shlex from datetime import datetime, date +from types import ModuleType def str_to_date(s): """ Get python datetime object from string. - It tests for only two formats used in RRFS: YYYYMMDD and YYYYMMDDHHMM Args: s: a string Returns: datetime object or None """ + v = None try: - v = datetime.strptime(s, "%Y%m%d%H%M") - return v - except: - try: + l = len(s) + if l == 8: v = datetime.strptime(s, "%Y%m%d") - return v - except: - return None + if l == 10: + v = datetime.strptime(s, "%Y%m%d%H") + elif l == 12: + v = datetime.strptime(s, "%Y%m%d%H%M") + elif l == 14: + v = datetime.strptime(s, "%Y%m%d%H%M%S") + except: + v = None + return v def date_to_str(d,short=False): """ Get string from python datetime object. 
@@ -240,7 +245,9 @@ def export_vars(dictionary=None, source_dict=None, env_vars=None): # skip functions and other unlikely variable names if callable(v): continue - if not k or k.islower() or k[0] == '_': + if isinstance(v,ModuleType): + continue + if not k or k[0] == '_': continue dictionary[k] = list_to_str(v) diff --git a/ush/python_utils/misc.py b/ush/python_utils/misc.py index 1934ac3d6c..c299e02735 100644 --- a/ush/python_utils/misc.py +++ b/ush/python_utils/misc.py @@ -55,3 +55,21 @@ def find_pattern_in_file(pattern, file_name): return match.groups() return None +def flatten_dict(dictionary,keys=None): + """ Flatten a recursive dictionary (e.g. yaml/json) to be one level deep + Args: + dictionary: the source dictionary + keys: list of keys on top level whose contents to flatten, if None all of them + Returns: + A one-level deep dictionary for the selected set of keys + """ + flat_dict = {} + for k,v in dictionary.items(): + if not keys or k in keys: + if isinstance(v,dict): + r = flatten_dict(v) + flat_dict.update(r) + else: + flat_dict[k] = v + return flat_dict + diff --git a/ush/set_FV3nml_ens_stoch_seeds.py b/ush/set_FV3nml_ens_stoch_seeds.py index 9d9ae4b39e..b56979374f 100644 --- a/ush/set_FV3nml_ens_stoch_seeds.py +++ b/ush/set_FV3nml_ens_stoch_seeds.py @@ -1,14 +1,17 @@ #!/usr/bin/env python3 -import unittest import os +import sys +import argparse +import unittest from textwrap import dedent from datetime import datetime from python_utils import print_input_args, print_info_msg, print_err_msg_exit,\ - date_to_str, mkdir_vrfy,cp_vrfy,\ - import_vars,set_env_var,\ - define_macos_utilities, cfg_to_yaml_str + date_to_str, mkdir_vrfy, cp_vrfy, str_to_type, \ + import_vars,set_env_var, \ + define_macos_utilities, cfg_to_yaml_str, \ + load_shell_config from set_namelist import set_namelist @@ -109,6 +112,25 @@ def set_FV3nml_ens_stoch_seeds(cdate): settings = {settings_str}''')) +def parse_args(argv): + """ Parse command line arguments""" + parser = 
argparse.ArgumentParser( + description='Creates stochastic seeds for an ensemble experiment.' + ) + + parser.add_argument('-c', '--cdate', + dest='cdate', + required=True, + help='Date.') + + return parser.parse_args(argv) + +if __name__ == '__main__': + args = parse_args(sys.argv[1:]) + cfg = load_shell_config(args.path_to_defns) + import_vars(dictionary=cfg) + set_FV3nml_ens_stoch_seeds(str_to_type(args.cdate)) + class Testing(unittest.TestCase): def test_set_FV3nml_ens_stoch_seeds(self): set_FV3nml_ens_stoch_seeds(cdate=self.cdate) @@ -116,22 +138,23 @@ def setUp(self): define_macos_utilities(); set_env_var('DEBUG',True) set_env_var('VERBOSE',True) + self.cdate=datetime(2021, 1, 1) USHDIR = os.path.dirname(os.path.abspath(__file__)) EXPTDIR = os.path.join(USHDIR,"test_data","expt"); cp_vrfy(os.path.join(USHDIR,f'templates{os.sep}input.nml.FV3'), \ os.path.join(EXPTDIR,'input.nml')) - self.cdate=datetime(2021, 1, 1) + for i in range(2): + mkdir_vrfy("-p", os.path.join(EXPTDIR,f"{date_to_str(self.cdate,True)}{os.sep}mem{i+1}")) - mkdir_vrfy("-p", os.path.join(EXPTDIR,f'{date_to_str(self.cdate,True)}{os.sep}mem0')) set_env_var("USHDIR",USHDIR) set_env_var("CYCLE_BASEDIR",EXPTDIR) - set_env_var("ENSMEM_INDX",0) + set_env_var("ENSMEM_INDX",2) set_env_var("FV3_NML_FN","input.nml") set_env_var("FV3_NML_FP",os.path.join(EXPTDIR,"input.nml")) set_env_var("DO_SPP",True) set_env_var("DO_SHUM",True) set_env_var("DO_SKEB",True) set_env_var("DO_LSM_SPP",True) - ISEED_SPP = [ 4, 4, 4, 4, 4] + ISEED_SPP = [ 4, 5, 6, 7, 8] set_env_var("ISEED_SPP",ISEED_SPP) diff --git a/ush/set_FV3nml_ens_stoch_seeds.sh b/ush/set_FV3nml_ens_stoch_seeds.sh deleted file mode 100644 index 64322d6648..0000000000 --- a/ush/set_FV3nml_ens_stoch_seeds.sh +++ /dev/null @@ -1,187 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that, for an ensemble-enabled experiment -# (i.e. 
for an experiment for which the workflow configuration variable -# DO_ENSEMBLE has been set to "TRUE"), creates new namelist files with -# unique stochastic "seed" parameters, using a base namelist file in the -# ${EXPTDIR} directory as a template. These new namelist files are stored -# within each member directory housed within each cycle directory. Files -# of any two ensemble members differ only in their stochastic "seed" -# parameter values. These namelist files are generated when this file is -# called as part of the RUN_FCST_TN task. -# -#----------------------------------------------------------------------- -# -function set_FV3nml_ens_stoch_seeds() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. 
Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "cdate" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local i \ - ensmem_num \ - fv3_nml_ens_fp \ - iseed_shum \ - iseed_skeb \ - iseed_sppt \ - iseed_spp \ - iseed_lsm_spp \ - num_iseed_spp \ - num_iseed_lsm_spp \ - settings -# -#----------------------------------------------------------------------- -# -# For a given cycle and member, generate a namelist file with unique -# seed values. 
-# -#----------------------------------------------------------------------- -# - ensmem_name="mem${ENSMEM_INDX}" - - fv3_nml_ensmem_fp="${CYCLE_BASEDIR}/${cdate}/${ensmem_name}/${FV3_NML_FN}" - - ensmem_num=$((10#${ENSMEM_INDX})) - - settings="\ -'nam_stochy': {" - - if [ ${DO_SPPT} = TRUE ]; then - - iseed_sppt=$(( cdate*1000 + ensmem_num*10 + 1 )) - settings="$settings - 'iseed_sppt': ${iseed_sppt}," - - fi - - if [ ${DO_SHUM} = TRUE ]; then - - iseed_shum=$(( cdate*1000 + ensmem_num*10 + 2 )) - settings="$settings - 'iseed_shum': ${iseed_shum}," - - fi - - if [ ${DO_SKEB} = TRUE ]; then - - iseed_skeb=$(( cdate*1000 + ensmem_num*10 + 3 )) - settings="$settings - 'iseed_skeb': ${iseed_skeb}," - - fi - settings="$settings - }" - - settings="$settings -'nam_sppperts': {" - - if [ ${DO_SPP} = TRUE ]; then - - num_iseed_spp=${#ISEED_SPP[@]} - for (( i=0; i<${num_iseed_spp}; i++ )); do - iseed_spp[$i]=$(( cdate*1000 + ensmem_num*10 + ${ISEED_SPP[$i]} )) - done - - settings="$settings - 'iseed_spp': [ $( printf "%s, " "${iseed_spp[@]}" ) ]," - - fi - - settings="$settings - }" - - settings="$settings -'nam_sfcperts': {" - - if [ ${DO_LSM_SPP} = TRUE ]; then - - iseed_lsm_spp=$(( cdate*1000 + ensmem_num*10 + 9)) - - settings="$settings - 'iseed_lndp': [ $( printf "%s, " "${iseed_lsm_spp[@]}" ) ]," - - fi - - settings="$settings - }" - - $USHDIR/set_namelist.py -q \ - -n ${FV3_NML_FP} \ - -u "$settings" \ - -o ${fv3_nml_ensmem_fp} || \ - print_err_msg_exit "\ -Call to python script set_namelist.py to set the variables in the FV3 -namelist file that specify the paths to the surface climatology files -failed. 
Parameters passed to this script are: - Full path to base namelist file: - FV3_NML_FP = \"${FV3_NML_FP}\" - Full path to output namelist file: - fv3_nml_ensmem_fp = \"${fv3_nml_ensmem_fp}\" - Namelist settings specified on command line (these have highest precedence): - settings = -$settings" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/set_FV3nml_sfc_climo_filenames.py b/ush/set_FV3nml_sfc_climo_filenames.py index 518f81ae36..c1a4d68c65 100644 --- a/ush/set_FV3nml_sfc_climo_filenames.py +++ b/ush/set_FV3nml_sfc_climo_filenames.py @@ -2,11 +2,13 @@ import unittest import os +import sys +import argparse from textwrap import dedent from python_utils import print_input_args, print_info_msg, print_err_msg_exit,\ check_var_valid_value,mv_vrfy,mkdir_vrfy,cp_vrfy,\ - rm_vrfy,import_vars,set_env_var,\ + rm_vrfy,import_vars,set_env_var,load_shell_config,\ define_macos_utilities,find_pattern_in_str,cfg_to_yaml_str from set_namelist import set_namelist @@ -92,6 +94,25 @@ def set_FV3nml_sfc_climo_filenames(): rm_vrfy(f'{fv3_nml_base_fp}') +def parse_args(argv): + """ Parse command line arguments""" + parser = argparse.ArgumentParser( + description='Set surface climatology fields.' 
+ ) + + parser.add_argument('-p', '--path-to-defns', + dest='path_to_defns', + required=True, + help='Path to var_defns file.') + + return parser.parse_args(argv) + +if __name__ == '__main__': + args = parse_args(sys.argv[1:]) + cfg = load_shell_config(args.path_to_defns) + import_vars(dictionary=cfg) + set_FV3nml_sfc_climo_filenames() + class Testing(unittest.TestCase): def test_set_FV3nml_sfc_climo_filenames(self): set_FV3nml_sfc_climo_filenames() diff --git a/ush/set_FV3nml_sfc_climo_filenames.sh b/ush/set_FV3nml_sfc_climo_filenames.sh deleted file mode 100644 index d6f9dc5d4c..0000000000 --- a/ush/set_FV3nml_sfc_climo_filenames.sh +++ /dev/null @@ -1,231 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that sets the values of the variables in -# the forecast model's namelist file that specify the paths to the surface -# climatology files on the FV3LAM native grid (which are either pregenerated -# or created by the MAKE_SFC_CLIMO_TN task). Note that the workflow -# generation scripts create symlinks to these surface climatology files -# in the FIXLAM directory, and the values in the namelist file that get -# set by this function are relative or full paths to these links. -# -#----------------------------------------------------------------------- -# -function set_FV3nml_sfc_climo_filenames() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=() - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local regex_search \ - suffix \ - num_nml_vars \ - mapping \ - nml_var_name \ - sfc_climo_field_name \ - fp -# -#----------------------------------------------------------------------- -# -# In the forecast model's namelist file, set those variables representing -# the name of a fixed file that has associated with it a surface climatology -# file to the path to that surface climatology file. -# -# Note: -# The following symlinks that contain no "halo" in their names currently -# point to the halo4 surface climatology files. 
But it is not clear whether -# these should be pointing to the halo0 or halo4 files. Ask!!! -# -#----------------------------------------------------------------------- -# -# The regular expression regex_search set below will be used to extract -# from the elements of the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING -# the name of the namelist variable to set and the corresponding surface -# climatology field from which to form the name of the surface climatology -# file. This regular expression matches any string that consists of the -# following sequence: -# -# 1) Zero or more spaces at the beginning of the string, followed by -# 2) A sequence of one or more characters that does not include a space -# or a pipe (i.e. the "|" character; this sequence is the namelist -# variable name), followed by -# 3) Zero or more spaces, followed by -# 4) A pipe, followed by -# 5) A sequence of one or more characters that does not include a space -# or a pipe (this sequence is the surface climatology field associated -# with the namelist variable), followed by -# 6) Zero or more spaces at the end of the string. -# -regex_search="^[ ]*([^| ]+)[ ]*[|][ ]*([^| ]+)[ ]*$" -# -# Set the suffix of the surface climatology files. -# -# Questions: -# 1) Should we be using the halo0 or halo4 files? -# 2) For clarity, is it possible to use the actual name of the file (i.e. -# the actual ending that is either "tile7.halo0.nc" or "tile7.halo4.nc" -# instead of "tileX.nc"? -# -#suffix="tile${TILE_RGNL}.halo4.nc" -suffix="tileX.nc" -# -# Create a multiline variable that consists of a yaml-compliant string -# specifying the values that the namelist variables that specify the -# surface climatology file paths need to be set to (one namelist variable -# per line, plus a header and footer). Below, this variable will be -# passed to a python script that will create the namelist file. 
-# -# Note that the array FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING contains -# the mapping between the namelist variables and the surface climatology -# fields. Here, we loop through this array and process each element to -# construct each line of "settings". -# -settings="\ -'namsfc': {" - -dummy_run_dir="$EXPTDIR/any_cyc" -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then - dummy_run_dir="${dummy_run_dir}/any_ensmem" -fi - -num_nml_vars=${#FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING[@]} -for (( i=0; i<${num_nml_vars}; i++ )); do - - mapping="${FV3_NML_VARNAME_TO_SFC_CLIMO_FIELD_MAPPING[$i]}" - nml_var_name=$( printf "%s\n" "$mapping" | \ - $SED -n -r -e "s/${regex_search}/\1/p" ) - sfc_climo_field_name=$( printf "%s\n" "$mapping" | - $SED -n -r -e "s/${regex_search}/\2/p" ) -# -# Check that the surface climatology field associated with the current -# namelist variable is valid. -# - check_var_valid_value "sfc_climo_field_name" "SFC_CLIMO_FIELDS" -# -# Set the full path to the surface climatology file. -# - fp="${FIXLAM}/${CRES}.${sfc_climo_field_name}.$suffix" -# -# If not in NCO mode, for portability and brevity change fp so that it -# is a relative path (relative to any cycle directory immediately under -# the experiment directory). -# - if [ "${RUN_ENVIR}" != "nco" ]; then - fp=$( realpath --canonicalize-missing --relative-to="${dummy_run_dir}" "$fp" ) - fi -# -# Add a line to the variable "settings" that specifies (in a yaml-compliant -# format) the name of the current namelist variable and the value it should -# be set to. -# - settings="$settings - '${nml_var_name}': $fp," - -done - -settings="$settings - }" -# -# For debugging purposes, print out what "settings" has been set to. 
-# -print_info_msg $VERBOSE " -The variable \"settings\" specifying values of the namelist variables -has been set as follows: - -settings = -$settings" -# -#----------------------------------------------------------------------- -# -# Rename the FV3 namelist file for the experiment by appending the string -# ".base" to its name. The call to the set_namelist.py script below will -# use this file as the base (i.e. starting) namelist file, and it will -# modify it as specified by the varaible "settings" above, saving the -# result in a new FV3 namelist file for the experiment. Once this is -# done, we remove the base namelist file since it is no longer needed. -# -#----------------------------------------------------------------------- -# -fv3_nml_base_fp="${FV3_NML_FP}.base" -mv_vrfy "${FV3_NML_FP}" "${fv3_nml_base_fp}" - -$USHDIR/set_namelist.py -q \ - -n ${fv3_nml_base_fp} \ - -u "$settings" \ - -o ${FV3_NML_FP} || \ - print_err_msg_exit "\ -Call to python script set_namelist.py to set the variables in the FV3 -namelist file that specify the paths to the surface climatology files -failed. Parameters passed to this script are: - Full path to base namelist file: - fv3_nml_base_fp = \"${fv3_nml_base_fp}\" - Full path to output namelist file: - FV3_NML_FP = \"${FV3_NML_FP}\" - Namelist settings specified on command line (these have highest precedence): - settings = -$settings" - -rm_vrfy "${fv3_nml_base_fp}" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/set_cycle_dates.sh b/ush/set_cycle_dates.sh deleted file mode 100644 index e13c78d7ee..0000000000 --- a/ush/set_cycle_dates.sh +++ /dev/null @@ -1,145 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that, given the starting date (date_start, -# in the form YYYYMMDD), the ending date (date_end, in the form YYYYMMDD), -# and an array containing the cycle hours for each day (whose elements -# have the form HH), returns an array of cycle date-hours whose elements -# have the form YYYYMMDD. Here, YYYY is a four-digit year, MM is a two- -# digit month, DD is a two-digit day of the month, and HH is a two-digit -# hour of the day. -# -#----------------------------------------------------------------------- -# -function set_cycle_dates() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. 
-# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ -"date_start" \ -"date_end" \ -"cycle_hrs" \ -"incr_cycl_freq" \ -"output_varname_all_cdates" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local all_cdates date_crnt -# -#----------------------------------------------------------------------- -# -# Ensure that the ending date is at or after the starting date. -# -#----------------------------------------------------------------------- -# - if [ "${date_end}" -lt "${date_start}" ]; then - print_err_msg_exit "\ -End date (date_end) must be at or after start date (date_start): - date_start = \"${date_start}\" - date_end = \"${date_end}\"" - fi -# -#----------------------------------------------------------------------- -# -# In the following "while" loop, we begin with the starting date and -# increment by n days (incr_day) each time through the loop until we reach the ending -# date. 
For each date, we obtain an intermediate array of cdates (whose -# elements have the format YYYYMMDDHH) by prepending to the elements of -# cycle_hrs the current date. (Thus, this array has the same number of -# elements as cycle_hrs.) We then append this intermediate array to the -# final array that will contain all cdates (i.e. over all days and cycle -# hours). -# -#----------------------------------------------------------------------- -# - all_cdates=() - date_crnt="${date_start}" - - if [ "${incr_cycl_freq}" -le 24 ]; then - incr_day=1 - else - incr_day=$(( ${incr_cycl_freq} / 24 )) - incr_day_rem=$(( ${incr_cycl_freq} % 24 )) - - if [ "${incr_day_rem}" -gt 0 ];then - print_err_msg_exit "\ -INCR_CYCL_FREQ is not divided by 24: - INCR_CYCL_FREQ = \"${incr_cycl_freq}\"" - fi - fi - - while [ "${date_crnt}" -le "${date_end}" ]; do - all_cdates+=( $( printf "%s " ${cycle_hrs[@]/#/${date_crnt}} ) ) - date_crnt=$( $DATE_UTIL -d "${date_crnt} + ${incr_day} days" +%Y%m%d ) - done -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - all_cdates_str="( "$( printf "\"%s\" " "${all_cdates[@]}" )")" - eval ${output_varname_all_cdates}=${all_cdates_str} -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/set_extrn_mdl_params.sh b/ush/set_extrn_mdl_params.sh deleted file mode 100644 index 12b11e88da..0000000000 --- a/ush/set_extrn_mdl_params.sh +++ /dev/null @@ -1,35 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file sets some parameters that are model or mode specific. 
-# -#----------------------------------------------------------------------- -# -function set_extrn_mdl_params() { - # - #----------------------------------------------------------------------- - # - # Set EXTRN_MDL_LBCS_OFFSET_HRS, which is the number of hours to shift - # the starting time of the external model that provides lateral boundary - # conditions. - # - #----------------------------------------------------------------------- - # - case "${EXTRN_MDL_NAME_LBCS}" in - "RAP") - EXTRN_MDL_LBCS_OFFSET_HRS=${EXTRN_MDL_LBCS_OFFSET_HRS:-"3"} - ;; - *) - EXTRN_MDL_LBCS_OFFSET_HRS=${EXTRN_MDL_LBCS_OFFSET_HRS:-"0"} - ;; - esac -} - -# -#----------------------------------------------------------------------- -# -# Call the function defined above. -# -#----------------------------------------------------------------------- -# -set_extrn_mdl_params diff --git a/ush/set_gridparams_ESGgrid.py b/ush/set_gridparams_ESGgrid.py index c1ead9522f..722c2f27e2 100644 --- a/ush/set_gridparams_ESGgrid.py +++ b/ush/set_gridparams_ESGgrid.py @@ -3,7 +3,6 @@ import unittest from datetime import datetime,timedelta -from constants import radius_Earth,degs_per_radian from python_utils import import_vars, set_env_var, print_input_args def set_gridparams_ESGgrid(lon_ctr,lat_ctr,nx,ny,halo_width,delx,dely,pazi): @@ -24,6 +23,10 @@ def set_gridparams_ESGgrid(lon_ctr,lat_ctr,nx,ny,halo_width,delx,dely,pazi): """ print_input_args(locals()) + + # get needed environment variables + IMPORTS = ['RADIUS_EARTH', 'DEGS_PER_RADIAN'] + import_vars(env_vars=IMPORTS) # #----------------------------------------------------------------------- # @@ -57,8 +60,8 @@ def set_gridparams_ESGgrid(lon_ctr,lat_ctr,nx,ny,halo_width,delx,dely,pazi): # #----------------------------------------------------------------------- # - del_angle_x_sg = (delx / (2.0 * radius_Earth)) * degs_per_radian - del_angle_y_sg = (dely / (2.0 * radius_Earth)) * degs_per_radian + del_angle_x_sg = (delx / (2.0 * RADIUS_EARTH)) * 
DEGS_PER_RADIAN + del_angle_y_sg = (dely / (2.0 * RADIUS_EARTH)) * DEGS_PER_RADIAN neg_nx_of_dom_with_wide_halo = -(nx + 2 * halo_width) neg_ny_of_dom_with_wide_halo = -(ny + 2 * halo_width) # @@ -69,10 +72,10 @@ def set_gridparams_ESGgrid(lon_ctr,lat_ctr,nx,ny,halo_width,delx,dely,pazi): #----------------------------------------------------------------------- # return (lon_ctr,lat_ctr,nx,ny,pazi,halo_width,stretch_factor, - "{:0.10f}".format(del_angle_x_sg), - "{:0.10f}".format(del_angle_y_sg), - "{:.0f}".format(neg_nx_of_dom_with_wide_halo), - "{:.0f}".format(neg_ny_of_dom_with_wide_halo)) + del_angle_x_sg, + del_angle_y_sg, + int(neg_nx_of_dom_with_wide_halo), + int(neg_ny_of_dom_with_wide_halo)) class Testing(unittest.TestCase): def test_set_gridparams_ESGgrid(self): @@ -93,18 +96,18 @@ def test_set_gridparams_ESGgrid(self): self.assertEqual(\ (LON_CTR,LAT_CTR,NX,NY,PAZI,NHW,STRETCH_FAC, - DEL_ANGLE_X_SG, - DEL_ANGLE_Y_SG, + round(DEL_ANGLE_X_SG,10), + round(DEL_ANGLE_Y_SG,10), NEG_NX_OF_DOM_WITH_WIDE_HALO, NEG_NY_OF_DOM_WITH_WIDE_HALO), (-97.5, 38.5, 1748, 1038, 0.0, 6,0.999, - "0.0134894006", - "0.0134894006", - "-1760", - "-1050") + 0.0134894006, + 0.0134894006, + -1760, + -1050) ) def setUp(self): - set_env_var('DEBUG',True) - set_env_var('VERBOSE',True) + set_env_var('RADIUS_EARTH',6371200.0) + set_env_var('DEGS_PER_RADIAN',57.2957795131) diff --git a/ush/set_gridparams_ESGgrid.sh b/ush/set_gridparams_ESGgrid.sh deleted file mode 100644 index 4b81377713..0000000000 --- a/ush/set_gridparams_ESGgrid.sh +++ /dev/null @@ -1,215 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines and then calls a function that sets the parameters -# for a grid that is to be generated using the "ESGgrid" grid generation -# method (i.e. GRID_GEN_METHOD set to "ESGgrid"). 
-# -#----------------------------------------------------------------------- -# -function set_gridparams_ESGgrid() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Set directories. -# -#----------------------------------------------------------------------- -# - local homerrfs=${scrfunc_dir%/*} - local ushdir="$homerrfs/ush" -# -#----------------------------------------------------------------------- -# -# Source the file containing various mathematical, physical, etc constants. -# -#----------------------------------------------------------------------- -# - . $ushdir/constants.sh -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. -# Then process the arguments provided to this script/function (which -# should consist of a set of name-value pairs of the form arg1="value1", -# etc). 
-# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "lon_ctr" \ - "lat_ctr" \ - "nx" \ - "ny" \ - "halo_width" \ - "delx" \ - "dely" \ - "pazi" \ - "outvarname_lon_ctr" \ - "outvarname_lat_ctr" \ - "outvarname_nx" \ - "outvarname_ny" \ - "outvarname_pazi" \ - "outvarname_halo_width" \ - "outvarname_stretch_factor" \ - "outvarname_del_angle_x_sg" \ - "outvarname_del_angle_y_sg" \ - "outvarname_neg_nx_of_dom_with_wide_halo" \ - "outvarname_neg_ny_of_dom_with_wide_halo" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# -# print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local stretch_factor \ - del_angle_x_sg \ - del_angle_y_sg \ - neg_nx_of_dom_with_wide_halo \ - neg_ny_of_dom_with_wide_halo -# -#----------------------------------------------------------------------- -# -# For a ESGgrid-type grid, the orography filtering is performed by pass- -# ing to the orography filtering the parameters for an "equivalent" glo- -# bal uniform cubed-sphere grid. These are the parameters that a global -# uniform cubed-sphere grid needs to have in order to have a nominal -# grid cell size equal to that of the (average) cell size on the region- -# al grid. These globally-equivalent parameters include a resolution -# (in units of number of cells in each of the two horizontal directions) -# and a stretch factor. 
The equivalent resolution is calculated in the -# script that generates the grid, and the stretch factor needs to be set -# to 1 because we are considering an equivalent globally UNIFORM grid. -# However, it turns out that with a non-symmetric regional grid (one in -# which nx is not equal to ny), setting stretch_factor to 1 fails be- -# cause the orography filtering program is designed for a global cubed- -# sphere grid and thus assumes that nx and ny for a given tile are equal -# when stretch_factor is exactly equal to 1. <-- Why is this? Seems like symmetry btwn x and y should still hold when the stretch factor is not equal to 1. -# It turns out that the program will work if we set stretch_factor to a -# value that is not exactly 1. This is what we do below. -# -#----------------------------------------------------------------------- -# - stretch_factor="0.999" # Check whether the orography program has been fixed so that we can set this to 1... -# -#----------------------------------------------------------------------- -# -# Set parameters needed as inputs to the regional_grid grid generation -# code. -# -#----------------------------------------------------------------------- -# - del_angle_x_sg=$( bc -l <<< "(${delx}/(2.0*${radius_Earth}))*${degs_per_radian}" ) - del_angle_x_sg=$( printf "%0.10f\n" ${del_angle_x_sg} ) - - del_angle_y_sg=$( bc -l <<< "(${dely}/(2.0*${radius_Earth}))*${degs_per_radian}" ) - del_angle_y_sg=$( printf "%0.10f\n" ${del_angle_y_sg} ) - - neg_nx_of_dom_with_wide_halo=$( bc -l <<< "-($nx + 2*${halo_width})" ) - neg_nx_of_dom_with_wide_halo=$( printf "%.0f\n" ${neg_nx_of_dom_with_wide_halo} ) - - neg_ny_of_dom_with_wide_halo=$( bc -l <<< "-($ny + 2*${halo_width})" ) - neg_ny_of_dom_with_wide_halo=$( printf "%.0f\n" ${neg_ny_of_dom_with_wide_halo} ) -# -#----------------------------------------------------------------------- -# -# Set output variables. 
-# -#----------------------------------------------------------------------- -# - if [ ! -z "${outvarname_lon_ctr}" ]; then - printf -v ${outvarname_lon_ctr} "%s" "${lon_ctr}" - fi - - if [ ! -z "${outvarname_lat_ctr}" ]; then - printf -v ${outvarname_lat_ctr} "%s" "${lat_ctr}" - fi - - if [ ! -z "${outvarname_nx}" ]; then - printf -v ${outvarname_nx} "%s" "${nx}" - fi - - if [ ! -z "${outvarname_ny}" ]; then - printf -v ${outvarname_ny} "%s" "${ny}" - fi - - if [ ! -z "${outvarname_halo_width}" ]; then - printf -v ${outvarname_halo_width} "%s" "${halo_width}" - fi - - if [ ! -z "${outvarname_stretch_factor}" ]; then - printf -v ${outvarname_stretch_factor} "%s" "${stretch_factor}" - fi - - if [ ! -z "${outvarname_pazi}" ]; then - printf -v ${outvarname_pazi} "%s" "${pazi}" - fi - - if [ ! -z "${outvarname_del_angle_x_sg}" ]; then - printf -v ${outvarname_del_angle_x_sg} "%s" "${del_angle_x_sg}" - fi - - if [ ! -z "${outvarname_del_angle_y_sg}" ]; then - printf -v ${outvarname_del_angle_y_sg} "%s" "${del_angle_y_sg}" - fi - - if [ ! -z "${outvarname_neg_nx_of_dom_with_wide_halo}" ]; then - printf -v ${outvarname_neg_nx_of_dom_with_wide_halo} "%s" "${neg_nx_of_dom_with_wide_halo}" - fi - - if [ ! -z "${outvarname_neg_ny_of_dom_with_wide_halo}" ]; then - printf -v ${outvarname_neg_ny_of_dom_with_wide_halo} "%s" "${neg_ny_of_dom_with_wide_halo}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/set_gridparams_GFDLgrid.py b/ush/set_gridparams_GFDLgrid.py index 0dab11f756..40a74d8105 100644 --- a/ush/set_gridparams_GFDLgrid.py +++ b/ush/set_gridparams_GFDLgrid.py @@ -2,8 +2,6 @@ import unittest -from constants import radius_Earth,degs_per_radian - from python_utils import import_vars, set_env_var, print_input_args, \ print_info_msg, print_err_msg_exit diff --git a/ush/set_gridparams_GFDLgrid.sh b/ush/set_gridparams_GFDLgrid.sh deleted file mode 100644 index 68b6937be1..0000000000 --- a/ush/set_gridparams_GFDLgrid.sh +++ /dev/null @@ -1,571 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines and then calls a function that sets the parameters -# for a grid that is to be generated using the "GFDLgrid" grid genera- -# tion method (i.e. GRID_GEN_METHOD set to "GFDLgrid"). -# -#----------------------------------------------------------------------- -# -function set_gridparams_GFDLgrid() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). 
-# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Set directories. -# -#----------------------------------------------------------------------- -# - local homerrfs=${scrfunc_dir%/*} - local ushdir="$homerrfs/ush" -# -#----------------------------------------------------------------------- -# -# Source the file containing various mathematical, physical, etc constants. -# -#----------------------------------------------------------------------- -# - . $ushdir/constants.sh -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. -# Then process the arguments provided to this script/function (which -# should consist of a set of name-value pairs of the form arg1="value1", -# etc). 
-# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "lon_of_t6_ctr" \ - "lat_of_t6_ctr" \ - "res_of_t6g" \ - "stretch_factor" \ - "refine_ratio_t6g_to_t7g" \ - "istart_of_t7_on_t6g" \ - "iend_of_t7_on_t6g" \ - "jstart_of_t7_on_t6g" \ - "jend_of_t7_on_t6g" \ - "verbose" \ - "outvarname_lon_of_t7_ctr" \ - "outvarname_lat_of_t7_ctr" \ - "outvarname_nx_of_t7_on_t7g" \ - "outvarname_ny_of_t7_on_t7g" \ - "outvarname_halo_width_on_t7g" \ - "outvarname_stretch_factor" \ - "outvarname_istart_of_t7_with_halo_on_t6sg" \ - "outvarname_iend_of_t7_with_halo_on_t6sg" \ - "outvarname_jstart_of_t7_with_halo_on_t6sg" \ - "outvarname_jend_of_t7_with_halo_on_t6sg" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local nx_of_t6_on_t6g \ - ny_of_t6_on_t6g \ - num_left_margin_cells_on_t6g \ - num_right_margin_cells_on_t6g \ - num_bot_margin_cells_on_t6g \ - num_top_margin_cells_on_t6g \ - lon_of_t7_ctr \ - lat_of_t7_ctr \ - istart_of_t7_on_t6sg \ - iend_of_t7_on_t6sg \ - jstart_of_t7_on_t6sg \ - jend_of_t7_on_t6sg \ - halo_width_on_t7g \ - halo_width_on_t6sg \ - istart_of_t7_with_halo_on_t6sg \ - iend_of_t7_with_halo_on_t6sg \ - jstart_of_t7_with_halo_on_t6sg \ - jend_of_t7_with_halo_on_t6sg \ - halo_width_on_t6sg \ - halo_width_on_t6g \ - halo_width_on_t7g \ - nx_of_t7_on_t6sg \ - nx_of_t7_on_t6g \ - nx_of_t7_on_t7g \ - ny_of_t7_on_t6sg \ - ny_of_t7_on_t6g \ - ny_of_t7_on_t7g \ - nx_of_t6_on_t6sg \ - ny_of_t6_on_t6sg \ - prime_factors_nx_of_t7_on_t7g \ - prime_factors_ny_of_t7_on_t7g \ - nx_of_t7_with_halo_on_t6sg \ - nx_of_t7_with_halo_on_t6g \ - nx_of_t7_with_halo_on_t7g \ - ny_of_t7_with_halo_on_t6sg \ - ny_of_t7_with_halo_on_t6g \ - ny_of_t7_with_halo_on_t7g -# -#----------------------------------------------------------------------- -# -# To simplify the 
grid setup, we require that tile 7 be centered on tile -# 6. Note that this is not really a restriction because tile 6 can al- -# ways be moved so that it is centered on tile 7 [the location of tile 6 -# doesn't really matter because for a regional setup, the forecast model -# will only run on tile 7 (not on tiles 1-6)]. -# -# We now check that tile 7 is centered on tile 6 by checking (1) that -# the number of cells (on tile 6) between the left boundaries of these -# two tiles is equal to that between their right boundaries and (2) that -# the number of cells (on tile 6) between the bottom boundaries of these -# two tiles is equal to that between their top boundaries. If not, we -# print out an error message and exit. If so, we set the longitude and -# latitude of the center of tile 7 to those of tile 6 and continue. -# -#----------------------------------------------------------------------- -# - nx_of_t6_on_t6g=${res_of_t6g} - ny_of_t6_on_t6g=${res_of_t6g} - - num_left_margin_cells_on_t6g=$(( istart_of_t7_on_t6g - 1 )) - num_right_margin_cells_on_t6g=$(( nx_of_t6_on_t6g - iend_of_t7_on_t6g )) - -# This if-statement can hopefully be removed once EMC agrees to make their -# GFDLgrid type grids (tile 7) symmetric about tile 6. 
- if [ ${num_left_margin_cells_on_t6g} -ne ${num_right_margin_cells_on_t6g} ]; then - print_err_msg_exit "\ -In order for tile 7 to be centered in the x direction on tile 6, the x- -direction tile 6 cell indices at which tile 7 starts and ends (given by -istart_of_t7_on_t6g and iend_of_t7_on_t6g, respectively) must be set -such that the number of tile 6 cells in the margin between the left -boundaries of tiles 6 and 7 (given by num_left_margin_cells_on_t6g) is -equal to that in the margin between their right boundaries (given by -num_right_margin_cells_on_t6g): - istart_of_t7_on_t6g = ${istart_of_t7_on_t6g} - iend_of_t7_on_t6g = ${iend_of_t7_on_t6g} - num_left_margin_cells_on_t6g = ${num_left_margin_cells_on_t6g} - num_right_margin_cells_on_t6g = ${num_right_margin_cells_on_t6g} -Note that the total number of cells in the x-direction on tile 6 is gi- -ven by: - nx_of_t6_on_t6g = ${nx_of_t6_on_t6g} -Please reset istart_of_t7_on_t6g and iend_of_t7_on_t6g and rerun." - fi - - num_bot_margin_cells_on_t6g=$(( jstart_of_t7_on_t6g - 1 )) - num_top_margin_cells_on_t6g=$(( ny_of_t6_on_t6g - jend_of_t7_on_t6g )) - -# This if-statement can hopefully be removed once EMC agrees to make their -# GFDLgrid type grids (tile 7) symmetric about tile 6. 
- if [ ${num_bot_margin_cells_on_t6g} -ne ${num_top_margin_cells_on_t6g} ]; then - print_err_msg_exit "\ -In order for tile 7 to be centered in the y direction on tile 6, the y- -direction tile 6 cell indices at which tile 7 starts and ends (given by -jstart_of_t7_on_t6g and jend_of_t7_on_t6g, respectively) must be set -such that the number of tile 6 cells in the margin between the left -boundaries of tiles 6 and 7 (given by num_left_margin_cells_on_t6g) is -equal to that in the margin between their right boundaries (given by -num_right_margin_cells_on_t6g): - jstart_of_t7_on_t6g = ${jstart_of_t7_on_t6g} - jend_of_t7_on_t6g = ${jend_of_t7_on_t6g} - num_bot_margin_cells_on_t6g = ${num_bot_margin_cells_on_t6g} - num_top_margin_cells_on_t6g = ${num_top_margin_cells_on_t6g} -Note that the total number of cells in the y-direction on tile 6 is gi- -ven by: - ny_of_t6_on_t6g = ${ny_of_t6_on_t6g} -Please reset jstart_of_t7_on_t6g and jend_of_t7_on_t6g and rerun." - fi - - lon_of_t7_ctr="${lon_of_t6_ctr}" - lat_of_t7_ctr="${lat_of_t6_ctr}" -# -#----------------------------------------------------------------------- -# -# The grid generation script grid_gen_scr called below in turn calls the -# make_hgrid utility/executable to construct the regional grid. make_- -# hgrid accepts as arguments the index limits (i.e. starting and ending -# indices) of the regional grid on the supergrid of the regional grid's -# parent tile. The regional grid's parent tile is tile 6, and the su- -# pergrid of any given tile is defined as the grid obtained by doubling -# the number of cells in each direction on that tile's grid. We will -# denote these index limits by -# -# istart_of_t7_on_t6sg -# iend_of_t7_on_t6sg -# jstart_of_t7_on_t6sg -# jend_of_t7_on_t6sg -# -# The "_T6SG" suffix in these names is used to indicate that the indices -# are on the supergrid of tile 6. Recall, however, that we have as in- -# puts the index limits of the regional grid on the tile 6 grid, not its -# supergrid. 
These are given by -# -# istart_of_t7_on_t6g -# iend_of_t7_on_t6g -# jstart_of_t7_on_t6g -# jend_of_t7_on_t6g -# -# We can obtain the former from the latter by recalling that the super- -# grid has twice the resolution of the original grid. Thus, -# -# istart_of_t7_on_t6sg = 2*istart_of_t7_on_t6g - 1 -# iend_of_t7_on_t6sg = 2*iend_of_t7_on_t6g -# jstart_of_t7_on_t6sg = 2*jstart_of_t7_on_t6g - 1 -# jend_of_t7_on_t6sg = 2*jend_of_t7_on_t6g -# -# These are obtained assuming that grid cells on tile 6 must either be -# completely within the regional domain or completely outside of it, -# i.e. the boundary of the regional grid must coincide with gridlines -# on the tile 6 grid; it cannot cut through tile 6 cells. (Note that -# this implies that the starting indices on the tile 6 supergrid must be -# odd while the ending indices must be even; the above expressions sa- -# tisfy this requirement.) We perfrom these calculations next. -# -#----------------------------------------------------------------------- -# - istart_of_t7_on_t6sg=$(( 2*istart_of_t7_on_t6g - 1 )) - iend_of_t7_on_t6sg=$(( 2*iend_of_t7_on_t6g )) - jstart_of_t7_on_t6sg=$(( 2*jstart_of_t7_on_t6g - 1 )) - jend_of_t7_on_t6sg=$(( 2*jend_of_t7_on_t6g )) -# -#----------------------------------------------------------------------- -# -# If we simply pass to make_hgrid the index limits of the regional grid -# on the tile 6 supergrid calculated above, make_hgrid will generate a -# regional grid without a halo. To obtain a regional grid with a halo, -# we must pass to make_hgrid the index limits (on the tile 6 supergrid) -# of the regional grid including a halo. We will let the variables -# -# istart_of_t7_with_halo_on_t6sg -# iend_of_t7_with_halo_on_t6sg -# jstart_of_t7_with_halo_on_t6sg -# jend_of_t7_with_halo_on_t6sg -# -# denote these limits. 
The reason we include "_wide_halo" in these va- -# riable names is that the halo of the grid that we will first generate -# will be wider than the halos that are actually needed as inputs to the -# FV3LAM model (i.e. the 0-cell-wide, 3-cell-wide, and 4-cell-wide halos -# described above). We will generate the grids with narrower halos that -# the model needs later on by "shaving" layers of cells from this wide- -# halo grid. Next, we describe how to calculate the above indices. -# -# Let halo_width_on_t7g denote the width of the "wide" halo in units of number of -# grid cells on the regional grid (i.e. tile 7) that we'd like to have -# along all four edges of the regional domain (left, right, bottom, and -# top). To obtain the corresponding halo width in units of number of -# cells on the tile 6 grid -- which we denote by halo_width_on_t6g -- we simply di- -# vide halo_width_on_t7g by the refinement ratio, i.e. -# -# halo_width_on_t6g = halo_width_on_t7g/refine_ratio_t6g_to_t7g -# -# The corresponding halo width on the tile 6 supergrid is then given by -# -# halo_width_on_t6sg = 2*halo_width_on_t6g -# = 2*halo_width_on_t7g/refine_ratio_t6g_to_t7g -# -# Note that halo_width_on_t6sg must be an integer, but the expression for it de- -# rived above may not yield an integer. To ensure that the halo has a -# width of at least halo_width_on_t7g cells on the regional grid, we round up the -# result of the expression above for halo_width_on_t6sg, i.e. we redefine halo_width_on_t6sg -# to be -# -# halo_width_on_t6sg = ceil(2*halo_width_on_t7g/refine_ratio_t6g_to_t7g) -# -# where ceil(...) is the ceiling function, i.e. it rounds its floating -# point argument up to the next larger integer. Since in bash division -# of two integers returns a truncated integer and since bash has no -# built-in ceil(...) function, we perform the rounding-up operation by -# adding the denominator (of the argument of ceil(...) above) minus 1 to -# the original numerator, i.e. 
by redefining halo_width_on_t6sg to be -# -# halo_width_on_t6sg = (2*halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1)/refine_ratio_t6g_to_t7g -# -# This trick works when dividing one positive integer by another. -# -# In order to calculate halo_width_on_t6g using the above expression, we must -# first specify halo_width_on_t7g. Next, we specify an initial value for it by -# setting it to one more than the largest-width halo that the model ac- -# tually needs, which is NH4. We then calculate halo_width_on_t6sg using the -# above expression. Note that these values of halo_width_on_t7g and halo_width_on_t6sg will -# likely not be their final values; their final values will be calcula- -# ted later below after calculating the starting and ending indices of -# the regional grid with wide halo on the tile 6 supergrid and then ad- -# justing the latter to satisfy certain conditions. -# -#----------------------------------------------------------------------- -# - halo_width_on_t7g=$(( NH4 + 1 )) - halo_width_on_t6sg=$(( (2*halo_width_on_t7g + refine_ratio_t6g_to_t7g - 1)/refine_ratio_t6g_to_t7g )) -# -#----------------------------------------------------------------------- -# -# With an initial value of halo_width_on_t6sg now available, we can obtain the -# tile 6 supergrid index limits of the regional domain (including the -# wide halo) from the index limits for the regional domain without a ha- -# lo by simply subtracting halo_width_on_t6sg from the lower index limits and add- -# ing halo_width_on_t6sg to the upper index limits, i.e. -# -# istart_of_t7_with_halo_on_t6sg = istart_of_t7_on_t6sg - halo_width_on_t6sg -# iend_of_t7_with_halo_on_t6sg = iend_of_t7_on_t6sg + halo_width_on_t6sg -# jstart_of_t7_with_halo_on_t6sg = jstart_of_t7_on_t6sg - halo_width_on_t6sg -# jend_of_t7_with_halo_on_t6sg = jend_of_t7_on_t6sg + halo_width_on_t6sg -# -# We calculate these next. 
-# -#----------------------------------------------------------------------- -# - istart_of_t7_with_halo_on_t6sg=$(( istart_of_t7_on_t6sg - halo_width_on_t6sg )) - iend_of_t7_with_halo_on_t6sg=$(( iend_of_t7_on_t6sg + halo_width_on_t6sg )) - jstart_of_t7_with_halo_on_t6sg=$(( jstart_of_t7_on_t6sg - halo_width_on_t6sg )) - jend_of_t7_with_halo_on_t6sg=$(( jend_of_t7_on_t6sg + halo_width_on_t6sg )) -# -#----------------------------------------------------------------------- -# -# As for the regional grid without a halo, the regional grid with a wide -# halo that make_hgrid will generate must be such that grid cells on -# tile 6 either lie completely within this grid or outside of it, i.e. -# they cannot lie partially within/outside of it. This implies that the -# starting indices on the tile 6 supergrid of the grid with wide halo -# must be odd while the ending indices must be even. Thus, below, we -# subtract 1 from the starting indices if they are even (which ensures -# that there will be at least halo_width_on_t7g halo cells along the left and bot- -# tom boundaries), and we add 1 to the ending indices if they are odd -# (which ensures that there will be at least halo_width_on_t7g halo cells along the -# right and top boundaries). 
-# -#----------------------------------------------------------------------- -# - if [ $(( istart_of_t7_with_halo_on_t6sg%2 )) -eq 0 ]; then - istart_of_t7_with_halo_on_t6sg=$(( istart_of_t7_with_halo_on_t6sg - 1 )) - fi - - if [ $(( iend_of_t7_with_halo_on_t6sg%2 )) -eq 1 ]; then - iend_of_t7_with_halo_on_t6sg=$(( iend_of_t7_with_halo_on_t6sg + 1 )) - fi - - if [ $(( jstart_of_t7_with_halo_on_t6sg%2 )) -eq 0 ]; then - jstart_of_t7_with_halo_on_t6sg=$(( jstart_of_t7_with_halo_on_t6sg - 1 )) - fi - - if [ $(( jend_of_t7_with_halo_on_t6sg%2 )) -eq 1 ]; then - jend_of_t7_with_halo_on_t6sg=$(( jend_of_t7_with_halo_on_t6sg + 1 )) - fi -# -#----------------------------------------------------------------------- -# -# Save the current shell options and temporarily turn off the xtrace op- -# tion to prevent clutter in stdout. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Now that the starting and ending tile 6 supergrid indices of the re- -# gional grid with the wide halo have been calculated (and adjusted), we -# recalculate the width of the wide halo on: -# -# 1) the tile 6 supergrid; -# 2) the tile 6 grid; and -# 3) the tile 7 grid. -# -# These are the final values of these quantities that are guaranteed to -# correspond to the starting and ending indices on the tile 6 supergrid. 
-# -#----------------------------------------------------------------------- -# - print_info_msg "$verbose" " -Original values of the halo width on the tile 6 supergrid and on the -tile 7 grid are: - halo_width_on_t6sg = ${halo_width_on_t6sg} - halo_width_on_t7g = ${halo_width_on_t7g}" - - halo_width_on_t6sg=$(( istart_of_t7_on_t6sg - istart_of_t7_with_halo_on_t6sg )) - halo_width_on_t6g=$(( halo_width_on_t6sg/2 )) - halo_width_on_t7g=$(( halo_width_on_t6g*refine_ratio_t6g_to_t7g )) - - print_info_msg "$verbose" " -Values of the halo width on the tile 6 supergrid and on the tile 7 grid -AFTER adjustments are: - halo_width_on_t6sg = ${halo_width_on_t6sg} - halo_width_on_t7g = ${halo_width_on_t7g}" -# -#----------------------------------------------------------------------- -# -# Calculate the number of cells that the regional domain (without halo) -# has in each of the two horizontal directions (say x and y). We denote -# these by nx_of_t7_on_t7g and ny_of_t7_on_t7g, respectively. These -# will be needed in the "shave" steps in the grid generation task of the -# workflow. -# -#----------------------------------------------------------------------- -# - nx_of_t7_on_t6sg=$(( iend_of_t7_on_t6sg - istart_of_t7_on_t6sg + 1 )) - nx_of_t7_on_t6g=$(( nx_of_t7_on_t6sg/2 )) - nx_of_t7_on_t7g=$(( nx_of_t7_on_t6g*refine_ratio_t6g_to_t7g )) - - ny_of_t7_on_t6sg=$(( jend_of_t7_on_t6sg - jstart_of_t7_on_t6sg + 1 )) - ny_of_t7_on_t6g=$(( ny_of_t7_on_t6sg/2 )) - ny_of_t7_on_t7g=$(( ny_of_t7_on_t6g*refine_ratio_t6g_to_t7g )) -# -# The following are set only for informational purposes. 
-# - nx_of_t6_on_t6sg=$(( 2*nx_of_t6_on_t6g )) - ny_of_t6_on_t6sg=$(( 2*ny_of_t6_on_t6g )) - - prime_factors_nx_of_t7_on_t7g=$( factor ${nx_of_t7_on_t7g} | $SED -r -e 's/^[0-9]+: (.*)/\1/' ) - prime_factors_ny_of_t7_on_t7g=$( factor ${ny_of_t7_on_t7g} | $SED -r -e 's/^[0-9]+: (.*)/\1/' ) - - print_info_msg "$verbose" " -The number of cells in the two horizontal directions (x and y) on the -parent tile's (tile 6) grid and supergrid are: - nx_of_t6_on_t6g = ${nx_of_t6_on_t6g} - ny_of_t6_on_t6g = ${ny_of_t6_on_t6g} - nx_of_t6_on_t6sg = ${nx_of_t6_on_t6sg} - ny_of_t6_on_t6sg = ${ny_of_t6_on_t6sg} - -The number of cells in the two horizontal directions on the tile 6 grid -and supergrid that the regional domain (tile 7) WITHOUT A HALO encompas- -ses are: - nx_of_t7_on_t6g = ${nx_of_t7_on_t6g} - ny_of_t7_on_t6g = ${ny_of_t7_on_t6g} - nx_of_t7_on_t6sg = ${nx_of_t7_on_t6sg} - ny_of_t7_on_t6sg = ${ny_of_t7_on_t6sg} - -The starting and ending i and j indices on the tile 6 grid used to gene- -rate this regional grid are: - istart_of_t7_on_t6g = ${istart_of_t7_on_t6g} - iend_of_t7_on_t6g = ${iend_of_t7_on_t6g} - jstart_of_t7_on_t6g = ${jstart_of_t7_on_t6g} - jend_of_t7_on_t6g = ${jend_of_t7_on_t6g} - -The corresponding starting and ending i and j indices on the tile 6 su- -pergrid are: - istart_of_t7_on_t6sg = ${istart_of_t7_on_t6sg} - iend_of_t7_on_t6sg = ${iend_of_t7_on_t6sg} - jstart_of_t7_on_t6sg = ${jstart_of_t7_on_t6sg} - jend_of_t7_on_t6sg = ${jend_of_t7_on_t6sg} - -The refinement ratio (ratio of the number of cells in tile 7 that abut -a single cell in tile 6) is: - refine_ratio_t6g_to_t7g = ${refine_ratio_t6g_to_t7g} - -The number of cells in the two horizontal directions on the regional do- -main's (i.e. 
tile 7's) grid WITHOUT A HALO are: - nx_of_t7_on_t7g = ${nx_of_t7_on_t7g} - ny_of_t7_on_t7g = ${ny_of_t7_on_t7g} - -The prime factors of nx_of_t7_on_t7g and ny_of_t7_on_t7g are (useful for -determining an MPI task layout): - prime_factors_nx_of_t7_on_t7g: ${prime_factors_nx_of_t7_on_t7g} - prime_factors_ny_of_t7_on_t7g: ${prime_factors_ny_of_t7_on_t7g}" -# -#----------------------------------------------------------------------- -# -# For informational purposes, calculate the number of cells in each di- -# rection on the regional grid including the wide halo (of width halo_- -# width_on_t7g cells). We denote these by nx_of_t7_with_halo_on_t7g and -# ny_of_t7_with_halo_on_t7g, respectively. -# -#----------------------------------------------------------------------- -# - nx_of_t7_with_halo_on_t6sg=$(( iend_of_t7_with_halo_on_t6sg - istart_of_t7_with_halo_on_t6sg + 1 )) - nx_of_t7_with_halo_on_t6g=$(( nx_of_t7_with_halo_on_t6sg/2 )) - nx_of_t7_with_halo_on_t7g=$(( nx_of_t7_with_halo_on_t6g*refine_ratio_t6g_to_t7g )) - - ny_of_t7_with_halo_on_t6sg=$(( jend_of_t7_with_halo_on_t6sg - jstart_of_t7_with_halo_on_t6sg + 1 )) - ny_of_t7_with_halo_on_t6g=$(( ny_of_t7_with_halo_on_t6sg/2 )) - ny_of_t7_with_halo_on_t7g=$(( ny_of_t7_with_halo_on_t6g*refine_ratio_t6g_to_t7g )) - - print_info_msg "$verbose" " -nx_of_t7_with_halo_on_t7g = ${nx_of_t7_with_halo_on_t7g} \ -(istart_of_t7_with_halo_on_t6sg = ${istart_of_t7_with_halo_on_t6sg}, \ -iend_of_t7_with_halo_on_t6sg = ${iend_of_t7_with_halo_on_t6sg})" - - print_info_msg "$verbose" " -ny_of_t7_with_halo_on_t7g = ${ny_of_t7_with_halo_on_t7g} \ -(jstart_of_t7_with_halo_on_t6sg = ${jstart_of_t7_with_halo_on_t6sg}, \ -jend_of_t7_with_halo_on_t6sg = ${jend_of_t7_with_halo_on_t6sg})" -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - if [ ! 
-z "${outvarname_lon_of_t7_ctr}" ]; then - printf -v ${outvarname_lon_of_t7_ctr} "%s" "${lon_of_t7_ctr}" - fi - - if [ ! -z "${outvarname_lat_of_t7_ctr}" ]; then - printf -v ${outvarname_lat_of_t7_ctr} "%s" "${lat_of_t7_ctr}" - fi - - if [ ! -z "${outvarname_nx_of_t7_on_t7g}" ]; then - printf -v ${outvarname_nx_of_t7_on_t7g} "%s" "${nx_of_t7_on_t7g}" - fi - - if [ ! -z "${outvarname_ny_of_t7_on_t7g}" ]; then - printf -v ${outvarname_ny_of_t7_on_t7g} "%s" "${ny_of_t7_on_t7g}" - fi - - if [ ! -z "${outvarname_halo_width_on_t7g}" ]; then - printf -v ${outvarname_halo_width_on_t7g} "%s" "${halo_width_on_t7g}" - fi - - if [ ! -z "${outvarname_stretch_factor}" ]; then - printf -v ${outvarname_stretch_factor} "%s" "${stretch_factor}" - fi - - if [ ! -z "${outvarname_istart_of_t7_with_halo_on_t6sg}" ]; then - printf -v ${outvarname_istart_of_t7_with_halo_on_t6sg} "%s" "${istart_of_t7_with_halo_on_t6sg}" - fi - - if [ ! -z "${outvarname_iend_of_t7_with_halo_on_t6sg}" ]; then - printf -v ${outvarname_iend_of_t7_with_halo_on_t6sg} "%s" "${iend_of_t7_with_halo_on_t6sg}" - fi - - if [ ! -z "${outvarname_jstart_of_t7_with_halo_on_t6sg}" ]; then - printf -v ${outvarname_jstart_of_t7_with_halo_on_t6sg} "%s" "${jstart_of_t7_with_halo_on_t6sg}" - fi - - if [ ! -z "${outvarname_jend_of_t7_with_halo_on_t6sg}" ]; then - printf -v ${outvarname_jend_of_t7_with_halo_on_t6sg} "%s" "${jend_of_t7_with_halo_on_t6sg}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options before turning off xtrace. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/set_ozone_param.sh b/ush/set_ozone_param.sh deleted file mode 100644 index bd6f5fd377..0000000000 --- a/ush/set_ozone_param.sh +++ /dev/null @@ -1,241 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that: -# -# (1) Determines the ozone parameterization being used by checking in the -# CCPP physics suite XML. -# -# (2) Sets the name of the global ozone production/loss file in the FIXgsm -# FIXgsm system directory to copy to the experiment's FIXam directory. -# -# (3) Resets the last element of the workflow array variable -# FIXgsm_FILES_TO_COPY_TO_FIXam that contains the files to copy from -# FIXgsm to FIXam (this last element is initially set to a dummy -# value) to the name of the ozone production/loss file set in the -# previous step. -# -# (4) Resets the element of the workflow array variable -# CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING (this array contains the -# mapping between the symlinks to create in any cycle directory and -# the files in the FIXam directory that are their targets) that -# specifies the mapping for the ozone symlink/file such that the -# target FIXam file name is set to the name of the ozone production/ -# loss file set above. -# -#----------------------------------------------------------------------- -# -function set_ozone_param() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. 
-# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ -"ccpp_phys_suite_fp" \ -"output_varname_ozone_param" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. 
-# -#----------------------------------------------------------------------- -# - local ozone_param \ - regex_search \ - fixgsm_ozone_fn \ - i \ - ozone_symlink \ - fixgsm_ozone_fn_is_set \ - regex_search \ - num_symlinks \ - mapping \ - symlink \ - mapping_ozone \ - msg -# -#----------------------------------------------------------------------- -# -# Get the name of the ozone parameterization being used. There are two -# possible ozone parameterizations: -# -# (1) A parameterization developed/published in 2015. Here, we refer to -# this as the 2015 parameterization. If this is being used, then we -# set the variable ozone_param to the string "ozphys_2015". -# -# (2) A parameterization developed/published sometime after 2015. Here, -# we refer to this as the after-2015 parameterization. If this is -# being used, then we set the variable ozone_param to the string -# "ozphys". -# -# We check the CCPP physics suite definition file (SDF) to determine the -# parameterization being used. If this file contains the line -# -# ozphys_2015 -# -# then the 2015 parameterization is being used. If it instead contains -# the line -# -# ozphys -# -# then the after-2015 parameterization is being used. (The SDF should -# contain exactly one of these lines; not both nor neither; we check for -# this.) 
-# -#----------------------------------------------------------------------- -# - regex_search="^[ ]*(ozphys.*)<\/scheme>[ ]*$" - ozone_param=$( $SED -r -n -e "s/${regex_search}/\1/p" "${ccpp_phys_suite_fp}" ) - - if [ "${ozone_param}" = "ozphys_2015" ]; then - fixgsm_ozone_fn="ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77" - elif [ "${ozone_param}" = "ozphys" ]; then - fixgsm_ozone_fn="global_o3prdlos.f77" - else - print_err_msg_exit "\ -Unknown ozone parameterization (ozone_param) or no ozone parameterization -specified in the CCPP physics suite file (ccpp_phys_suite_fp): - ccpp_phys_suite_fp = \"${ccpp_phys_suite_fp}\" - ozone_param = \"${ozone_param}\"" - fi -# -#----------------------------------------------------------------------- -# -# Set the last element of the array FIXgsm_FILES_TO_COPY_TO_FIXam to the -# name of the ozone production/loss file to copy from the FIXgsm to the -# FIXam directory. -# -#----------------------------------------------------------------------- -# -i=$(( ${#FIXgsm_FILES_TO_COPY_TO_FIXam[@]} - 1 )) -FIXgsm_FILES_TO_COPY_TO_FIXam[$i]="${fixgsm_ozone_fn}" -# -#----------------------------------------------------------------------- -# -# Set the element in the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING that -# specifies the mapping between the symlink for the ozone production/loss -# file that must be created in each cycle directory and its target in the -# FIXam directory. The name of the symlink is alrady in the array, but -# the target is not because it depends on the ozone parameterization that -# the physics suite uses. Since we determined the ozone parameterization -# above, we now set the target of the symlink accordingly. 
-# -#----------------------------------------------------------------------- -# -ozone_symlink="global_o3prdlos.f77" -fixgsm_ozone_fn_is_set="FALSE" -regex_search="^[ ]*([^| ]*)[ ]*[|][ ]*([^| ]*)[ ]*$" -num_symlinks=${#CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[@]} - -for (( i=0; i<${num_symlinks}; i++ )); do - mapping="${CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[$i]}" - symlink=$( printf "%s\n" "$mapping" | \ - $SED -n -r -e "s/${regex_search}/\1/p" ) - if [ "$symlink" = "${ozone_symlink}" ]; then - regex_search="^[ ]*([^| ]+[ ]*)[|][ ]*([^| ]*)[ ]*$" - mapping_ozone=$( printf "%s\n" "$mapping" | \ - $SED -n -r -e "s/${regex_search}/\1/p" ) - mapping_ozone="${mapping_ozone}| ${fixgsm_ozone_fn}" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[$i]="${mapping_ozone}" - fixgsm_ozone_fn_is_set="TRUE" - break - fi -done -# -#----------------------------------------------------------------------- -# -# If fixgsm_ozone_fn_is_set is set to "TRUE", then the appropriate element -# of the array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING was set successfully. -# In this case, print out the new version of this array. Otherwise, print -# out an error message and exit. 
-# -#----------------------------------------------------------------------- -# -if [ "${fixgsm_ozone_fn_is_set}" = "TRUE" ]; then - - msg=" -After setting the file name of the ozone production/loss file in the -FIXgsm directory (based on the ozone parameterization specified in the -CCPP suite definition file), the array specifying the mapping between -the symlinks that need to be created in the cycle directories and the -files in the FIXam directory is: - - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = ( \\ -" - msg="$msg"$( printf "\"%s\" \\\\\n" "${CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[@]}" ) - msg="$msg"$( printf "\n)" ) - print_info_msg "$VERBOSE" "$msg" - -else - - print_err_msg_exit "\ -Unable to set name of the ozone production/loss file in the FIXgsm directory -in the array that specifies the mapping between the symlinks that need to -be created in the cycle directories and the files in the FIXgsm directory: - fixgsm_ozone_fn_is_set = \"${fixgsm_ozone_fn_is_set}\"" - -fi -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - eval ${output_varname_ozone_param}="${ozone_param}" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. 
-# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/set_predef_grid_params.py b/ush/set_predef_grid_params.py index 2838517150..970187e1de 100644 --- a/ush/set_predef_grid_params.py +++ b/ush/set_predef_grid_params.py @@ -3,8 +3,6 @@ import unittest import os -from constants import radius_Earth,degs_per_radian - from python_utils import process_args,import_vars,export_vars,set_env_var,get_env_var,\ print_input_args,define_macos_utilities, load_config_file, \ cfg_to_yaml_str @@ -18,9 +16,11 @@ def set_predef_grid_params(): None """ # import all environement variables - import_vars() + IMPORTS = ['PREDEF_GRID_NAME', 'QUILTING', 'DT_ATMOS', 'LAYOUT_X', 'LAYOUT_Y', 'BLOCKSIZE'] + import_vars(env_vars=IMPORTS) - params_dict = load_config_file("predef_grid_params.yaml") + USHDIR = os.path.dirname(os.path.abspath(__file__)) + params_dict = load_config_file(os.path.join(USHDIR,"predef_grid_params.yaml")) params_dict = params_dict[PREDEF_GRID_NAME] # if QUILTING = False, skip variables that start with "WRTCMP_" @@ -34,17 +34,14 @@ def set_predef_grid_params(): if globals()[var] is not None: params_dict[var] = globals()[var] - #export variables to environment + # export variables to environment export_vars(source_dict=params_dict) -# -#----------------------------------------------------------------------- -# -# Call the function defined above. 
-# -#----------------------------------------------------------------------- -# + + return params_dict + if __name__ == "__main__": - set_predef_grid_params() + params_dict = set_predef_grid_params() + print( cfg_to_shell_str(params_dict), end='' ) class Testing(unittest.TestCase): def test_set_predef_grid_params(self): diff --git a/ush/set_predef_grid_params.sh b/ush/set_predef_grid_params.sh deleted file mode 100644 index e0c89644df..0000000000 --- a/ush/set_predef_grid_params.sh +++ /dev/null @@ -1,1765 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines and then calls a function that sets grid parameters -# for the specified predefined grid. -# -#----------------------------------------------------------------------- -# -function set_predef_grid_params() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Set directories. 
-# -#----------------------------------------------------------------------- -# - local homerrfs=${scrfunc_dir%/*} - local ushdir="$homerrfs/ush" -# -#----------------------------------------------------------------------- -# -# Source the file containing various mathematical, physical, etc constants. -# -#----------------------------------------------------------------------- -# - . $ushdir/constants.sh -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. -# Then process the arguments provided to this script/function (which -# should consist of a set of name-value pairs of the form arg1="value1", -# etc). -# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "predef_grid_name" \ - "dt_atmos" \ - "layout_x" \ - "layout_y" \ - "blocksize" \ - "quilting" \ - "outvarname_grid_gen_method" \ - "outvarname_esggrid_lon_ctr" \ - "outvarname_esggrid_lat_ctr" \ - "outvarname_esggrid_delx" \ - "outvarname_esggrid_dely" \ - "outvarname_esggrid_nx" \ - "outvarname_esggrid_ny" \ - "outvarname_esggrid_pazi" \ - "outvarname_esggrid_wide_halo_width" \ - "outvarname_gfdlgrid_lon_t6_ctr" \ - "outvarname_gfdlgrid_lat_t6_ctr" \ - "outvarname_gfdlgrid_stretch_fac" \ - "outvarname_gfdlgrid_num_cells" \ - "outvarname_gfdlgrid_refine_ratio" \ - "outvarname_gfdlgrid_istart_of_rgnl_dom_on_t6g" \ - "outvarname_gfdlgrid_iend_of_rgnl_dom_on_t6g" \ - "outvarname_gfdlgrid_jstart_of_rgnl_dom_on_t6g" \ - "outvarname_gfdlgrid_jend_of_rgnl_dom_on_t6g" \ - "outvarname_gfdlgrid_use_num_cells_in_filenames" \ - "outvarname_dt_atmos" \ - "outvarname_layout_x" \ - "outvarname_layout_y" \ - "outvarname_blocksize" \ - "outvarname_wrtcmp_write_groups" \ - "outvarname_wrtcmp_write_tasks_per_group" \ - "outvarname_wrtcmp_output_grid" \ - "outvarname_wrtcmp_cen_lon" \ - "outvarname_wrtcmp_cen_lat" \ - "outvarname_wrtcmp_stdlat1" \ - "outvarname_wrtcmp_stdlat2" \ - 
"outvarname_wrtcmp_nx" \ - "outvarname_wrtcmp_ny" \ - "outvarname_wrtcmp_lon_lwr_left" \ - "outvarname_wrtcmp_lat_lwr_left" \ - "outvarname_wrtcmp_lon_upr_rght" \ - "outvarname_wrtcmp_lat_upr_rght" \ - "outvarname_wrtcmp_dx" \ - "outvarname_wrtcmp_dy" \ - "outvarname_wrtcmp_dlon" \ - "outvarname_wrtcmp_dlat" \ - ) - process_args "valid_args" "$@" -# -#----------------------------------------------------------------------- -# -# Declare and initialize local variables. -# -#----------------------------------------------------------------------- -# - local __grid_gen_method__="" \ - __esggrid_lon_ctr__="" \ - __esggrid_lat_ctr__="" \ - __esggrid_delx__="" \ - __esggrid_dely__="" \ - __esggrid_nx__="" \ - __esggrid_ny__="" \ - __esggrid_pazi__="" \ - __esggrid_wide_halo_width__="" \ - __gfdlgrid_lon_t6_ctr__="" \ - __gfdlgrid_lat_t6_ctr__="" \ - __gfdlgrid_stretch_fac__="" \ - __gfdlgrid_num_cells__="" \ - __gfdlgrid_refine_ratio__="" \ - __gfdlgrid_istart_of_rgnl_dom_on_t6g__="" \ - __gfdlgrid_iend_of_rgnl_dom_on_t6g__="" \ - __gfdlgrid_jstart_of_rgnl_dom_on_t6g__="" \ - __gfdlgrid_jend_of_rgnl_dom_on_t6g__="" \ - __gfdlgrid_use_num_cells_in_filenames__="" \ - __dt_atmos__="" \ - __layout_x__="" \ - __layout_y__="" \ - __blocksize__="" \ - __wrtcmp_write_groups__="" \ - __wrtcmp_write_tasks_per_group__="" \ - __wrtcmp_output_grid__="" \ - __wrtcmp_cen_lon__="" \ - __wrtcmp_cen_lat__="" \ - __wrtcmp_stdlat1__="" \ - __wrtcmp_stdlat2__="" \ - __wrtcmp_nx__="" \ - __wrtcmp_ny__="" \ - __wrtcmp_lon_lwr_left__="" \ - __wrtcmp_lat_lwr_left__="" \ - __wrtcmp_lon_upr_rght__="" \ - __wrtcmp_lat_upr_rght__="" \ - __wrtcmp_dx__="" \ - __wrtcmp_dy__="" \ - __wrtcmp_dlon__="" \ - __wrtcmp_dlat__="" \ - num_margin_cells_T6_left="" \ - num_margin_cells_T6_right="" \ - num_margin_cells_T6_bottom="" \ - num_margin_cells_T6_top="" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# 
script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# -# print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Make sure that the input argument "quilting" is set to a valid value. -# -#----------------------------------------------------------------------- -# - check_var_valid_value "quilting" "valid_vals_BOOLEAN" - quilting=$(boolify "$quilting") -# -#----------------------------------------------------------------------- -# -# Set grid and other parameters according to the value of the predefined -# domain (predef_grid_name). Note that the code will enter this script -# only if predef_grid_name has a valid (and non-empty) value. -# -#################### -# The following comments need to be updated: -#################### -# -# 1) Reset the experiment title (expt_title). -# 2) Reset the grid parameters. -# 3) If the write component is to be used (i.e. "quilting" is set to -# "TRUE") and the variable WRTCMP_PARAMS_TMPL_FN containing the name -# of the write-component template file is unset or empty, set that -# filename variable to the appropriate preexisting template file. -# -# For the predefined domains, we determine the starting and ending indi- -# ces of the regional grid within tile 6 by specifying margins (in units -# of number of cells on tile 6) between the boundary of tile 6 and that -# of the regional grid (tile 7) along the left, right, bottom, and top -# portions of these boundaries. Note that we do not use "west", "east", -# "south", and "north" here because the tiles aren't necessarily orient- -# ed such that the left boundary segment corresponds to the west edge, -# etc. 
The widths of these margins (in units of number of cells on tile -# 6) are specified via the parameters -# -# num_margin_cells_T6_left -# num_margin_cells_T6_right -# num_margin_cells_T6_bottom -# num_margin_cells_T6_top -# -# where the "_T6" in these names is used to indicate that the cell count -# is on tile 6, not tile 7. -# -# Note that we must make the margins wide enough (by making the above -# four parameters large enough) such that a region of halo cells around -# the boundary of the regional grid fits into the margins, i.e. such -# that the halo does not overrun the boundary of tile 6. (The halo is -# added later in another script; its function is to feed in boundary -# conditions to the regional grid.) Currently, a halo of 5 regional -# grid cells is used around the regional grid. Setting num_margin_- -# cells_T6_... to at least 10 leaves enough room for this halo. -# -#----------------------------------------------------------------------- -# - case "${predef_grid_name}" in -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~25km cells. 
-# -#----------------------------------------------------------------------- -# - "RRFS_CONUS_25km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="38.5" - - __esggrid_delx__="25000.0" - __esggrid_dely__="25000.0" - - __esggrid_nx__="219" - __esggrid_ny__="131" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-40}" - - __layout_x__="${layout_x:-5}" - __layout_y__="${layout_y:-2}" - __blocksize__="${blocksize:-40}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="2" - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="217" - __wrtcmp_ny__="128" - __wrtcmp_lon_lwr_left__="-122.719528" - __wrtcmp_lat_lwr_left__="21.138123" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~25km cells that can be initialized from -# the HRRR. 
-# -#----------------------------------------------------------------------- -# - "RRFS_CONUScompact_25km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="38.5" - - __esggrid_delx__="25000.0" - __esggrid_dely__="25000.0" - - __esggrid_nx__="202" - __esggrid_ny__="116" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-40}" - - __layout_x__="${layout_x:-5}" - __layout_y__="${layout_y:-2}" - __blocksize__="${blocksize:-40}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="2" - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="199" - __wrtcmp_ny__="111" - __wrtcmp_lon_lwr_left__="-121.23349066" - __wrtcmp_lat_lwr_left__="23.41731593" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~13km cells. 
-# -#----------------------------------------------------------------------- -# - "RRFS_CONUS_13km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="38.5" - - __esggrid_delx__="13000.0" - __esggrid_dely__="13000.0" - - __esggrid_nx__="420" - __esggrid_ny__="252" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-45}" - - __layout_x__="${layout_x:-16}" - __layout_y__="${layout_y:-10}" - __blocksize__="${blocksize:-32}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="416" - __wrtcmp_ny__="245" - __wrtcmp_lon_lwr_left__="-122.719528" - __wrtcmp_lat_lwr_left__="21.138123" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~13km cells that can be initialized from the HRRR. 
-# -#----------------------------------------------------------------------- -# - "RRFS_CONUScompact_13km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="38.5" - - __esggrid_delx__="13000.0" - __esggrid_dely__="13000.0" - - __esggrid_nx__="396" - __esggrid_ny__="232" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-45}" - - __layout_x__="${layout_x:-16}" - __layout_y__="${layout_y:-10}" - __blocksize__="${blocksize:-32}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="393" - __wrtcmp_ny__="225" - __wrtcmp_lon_lwr_left__="-121.70231097" - __wrtcmp_lat_lwr_left__="22.57417972" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~3km cells. 
-# -#----------------------------------------------------------------------- -# - "RRFS_CONUS_3km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="38.5" - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="1820" - __esggrid_ny__="1092" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-36}" - - __layout_x__="${layout_x:-28}" - __layout_y__="${layout_y:-28}" - __blocksize__="${blocksize:-29}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="1799" - __wrtcmp_ny__="1059" - __wrtcmp_lon_lwr_left__="-122.719528" - __wrtcmp_lat_lwr_left__="21.138123" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS CONUS domain with ~3km cells that can be initialized from -# the HRRR. 
-# -#----------------------------------------------------------------------- -# - "RRFS_CONUScompact_3km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="38.5" - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="1748" - __esggrid_ny__="1038" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-40}" - - __layout_x__="${layout_x:-30}" - __layout_y__="${layout_y:-16}" - __blocksize__="${blocksize:-32}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="1746" - __wrtcmp_ny__="1014" - __wrtcmp_lon_lwr_left__="-122.17364391" - __wrtcmp_lat_lwr_left__="21.88588562" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS SUBCONUS domain with ~3km cells. 
-# -#----------------------------------------------------------------------- -# - "RRFS_SUBCONUS_3km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="35.0" - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="840" - __esggrid_ny__="600" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-40}" - - __layout_x__="${layout_x:-30}" - __layout_y__="${layout_y:-24}" - __blocksize__="${blocksize:-35}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="837" - __wrtcmp_ny__="595" - __wrtcmp_lon_lwr_left__="-109.97410429" - __wrtcmp_lat_lwr_left__="26.31459843" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# A subconus domain over Indianapolis, Indiana with ~3km cells. This is -# mostly for testing on a 3km grid with a much small number of cells than -# on the full CONUS. 
-# -#----------------------------------------------------------------------- -# - "SUBCONUS_Ind_3km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-86.16" - __esggrid_lat_ctr__="39.77" - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="200" - __esggrid_ny__="200" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-40}" - - __layout_x__="${layout_x:-5}" - __layout_y__="${layout_y:-5}" - __blocksize__="${blocksize:-40}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="197" - __wrtcmp_ny__="197" - __wrtcmp_lon_lwr_left__="-89.47120417" - __wrtcmp_lat_lwr_left__="37.07809642" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS Alaska domain with ~13km cells. -# -# Note: -# This grid has not been thoroughly tested (as of 20201027). 
-# -#----------------------------------------------------------------------- -# - "RRFS_AK_13km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-161.5" - __esggrid_lat_ctr__="63.0" - - __esggrid_delx__="13000.0" - __esggrid_dely__="13000.0" - - __esggrid_nx__="320" - __esggrid_ny__="240" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - -# __dt_atmos__="${dt_atmos:-50}" - __dt_atmos__="${dt_atmos:-10}" - - __layout_x__="${layout_x:-16}" - __layout_y__="${layout_y:-12}" - __blocksize__="${blocksize:-40}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - -# The following works. The numbers were obtained using the NCL scripts -# but only after manually modifying the longitutes of two of the four -# corners of the domain to add 360.0 to them. Need to automate that -# procedure. - __wrtcmp_nx__="318" - __wrtcmp_ny__="234" -# __wrtcmp_lon_lwr_left__="-187.76660836" - __wrtcmp_lon_lwr_left__="172.23339164" - __wrtcmp_lat_lwr_left__="45.77691870" - - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - -# The following rotated_latlon coordinate system parameters were obtained -# using the NCL code and work. 
-# if [ "$quilting" = "TRUE" ]; then -# __wrtcmp_write_groups__="1" -# __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) -# __wrtcmp_output_grid__="rotated_latlon" -# __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" -# __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" -# __wrtcmp_lon_lwr_left__="-18.47206579" -# __wrtcmp_lat_lwr_left__="-13.56176982" -# __wrtcmp_lon_upr_rght__="18.47206579" -# __wrtcmp_lat_upr_rght__="13.56176982" -## __wrtcmp_dlon__="0.11691181" -## __wrtcmp_dlat__="0.11691181" -# __wrtcmp_dlon__=$( printf "%.9f" $( bc -l <<< "(${esggrid_delx}/${radius_Earth})*${degs_per_radian}" ) ) -# __wrtcmp_dlat__=$( printf "%.9f" $( bc -l <<< "(${esggrid_dely}/${radius_Earth})*${degs_per_radian}" ) ) -# fi - ;; -# -#----------------------------------------------------------------------- -# -# The RRFS Alaska domain with ~3km cells. -# -# Note: -# This grid has not been thoroughly tested (as of 20201027). -# -#----------------------------------------------------------------------- -# - "RRFS_AK_3km") - -# if [ "${grid_gen_method}" = "GFDLgrid" ]; then -# -# __gfdlgrid_lon_t6_ctr__="-160.8" -# __gfdlgrid_lat_t6_ctr__="63.0" -# __gfdlgrid_stretch_fac__="1.161" -# __gfdlgrid_num_cells__="768" -# __gfdlgrid_refine_ratio__="4" -# -# num_margin_cells_T6_left="204" -# __gfdlgrid_istart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_left + 1 )) -# -# num_margin_cells_T6_right="204" -# __gfdlgrid_iend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_right )) -# -# num_margin_cells_T6_bottom="249" -# __gfdlgrid_jstart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_bottom + 1 )) -# -# num_margin_cells_T6_top="249" -# __gfdlgrid_jend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_top )) -# -# __gfdlgrid_use_num_cells_in_filenames__="FALSE" -# -# __dt_atmos__="${dt_atmos:-18}" -# -# __layout_x__="${layout_x:-24}" -# __layout_y__="${layout_y:-24}" -# __blocksize__="${blocksize:-15}" -# -# if [ "$quilting" = "TRUE" ]; then -# 
__wrtcmp_write_groups__="1" -# __wrtcmp_write_tasks_per_group__="2" -# __wrtcmp_output_grid__="lambert_conformal" -# __wrtcmp_cen_lon__="${__gfdlgrid_lon_t6_ctr__}" -# __wrtcmp_cen_lat__="${__gfdlgrid_lat_t6_ctr__}" -# __wrtcmp_stdlat1__="${__gfdlgrid_lat_t6_ctr__}" -# __wrtcmp_stdlat2__="${__gfdlgrid_lat_t6_ctr__}" -# __wrtcmp_nx__="1320" -# __wrtcmp_ny__="950" -# __wrtcmp_lon_lwr_left__="173.734" -# __wrtcmp_lat_lwr_left__="46.740347" -# __wrtcmp_dx__="3000.0" -# __wrtcmp_dy__="3000.0" -# fi -# -# elif [ "${grid_gen_method}" = "ESGgrid" ]; then - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-161.5" - __esggrid_lat_ctr__="63.0" - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="1380" - __esggrid_ny__="1020" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - -# __dt_atmos__="${dt_atmos:-50}" - __dt_atmos__="${dt_atmos:-10}" - - __layout_x__="${layout_x:-30}" - __layout_y__="${layout_y:-17}" - __blocksize__="${blocksize:-40}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="1379" - __wrtcmp_ny__="1003" - __wrtcmp_lon_lwr_left__="-187.89737923" - __wrtcmp_lat_lwr_left__="45.84576053" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# The WoFS domain with ~3km cells. -# -# Note: -# The WoFS domain will generate a 301 x 301 output grid (WRITE COMPONENT) and -# will eventually be movable (esggrid_lon_ctr/esggrid_lat_ctr). 
A python script -# python_utils/fv3write_parms_lambert will be useful to determine -# wrtcmp_lon_lwr_left and wrtcmp_lat_lwr_left locations (only for Lambert map -# projection currently) of the quilting output when the domain location is -# moved. Later, it should be integrated into the workflow. -# -#----------------------------------------------------------------------- -# - "WoFS_3km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-97.5" - __esggrid_lat_ctr__="38.5" - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="361" - __esggrid_ny__="361" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-20}" - - __layout_x__="${layout_x:-18}" - __layout_y__="${layout_y:-12}" - __blocksize__="${blocksize:-30}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="301" - __wrtcmp_ny__="301" - __wrtcmp_lon_lwr_left__="-102.3802487" - __wrtcmp_lat_lwr_left__="34.3407918" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# A CONUS domain of GFDLgrid type with ~25km cells. -# -# Note: -# This grid is larger than the HRRRX domain and thus cannot be initialized -# using the HRRRX. 
-# -#----------------------------------------------------------------------- -# - "CONUS_25km_GFDLgrid") - - __grid_gen_method__="GFDLgrid" - - __gfdlgrid_lon_t6_ctr__="-97.5" - __gfdlgrid_lat_t6_ctr__="38.5" - __gfdlgrid_stretch_fac__="1.4" - __gfdlgrid_num_cells__="96" - __gfdlgrid_refine_ratio__="3" - - num_margin_cells_T6_left="12" - __gfdlgrid_istart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_left + 1 )) - - num_margin_cells_T6_right="12" - __gfdlgrid_iend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_right )) - - num_margin_cells_T6_bottom="16" - __gfdlgrid_jstart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_bottom + 1 )) - - num_margin_cells_T6_top="16" - __gfdlgrid_jend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_top )) - - __gfdlgrid_use_num_cells_in_filenames__="TRUE" - - __dt_atmos__="${dt_atmos:-225}" - - __layout_x__="${layout_x:-6}" - __layout_y__="${layout_y:-4}" - __blocksize__="${blocksize:-36}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="rotated_latlon" - __wrtcmp_cen_lon__="${__gfdlgrid_lon_t6_ctr__}" - __wrtcmp_cen_lat__="${__gfdlgrid_lat_t6_ctr__}" - __wrtcmp_lon_lwr_left__="-24.40085141" - __wrtcmp_lat_lwr_left__="-19.65624142" - __wrtcmp_lon_upr_rght__="24.40085141" - __wrtcmp_lat_upr_rght__="19.65624142" - __wrtcmp_dlon__="0.22593381" - __wrtcmp_dlat__="0.22593381" - fi - ;; -# -#----------------------------------------------------------------------- -# -# A CONUS domain of GFDLgrid type with ~3km cells. -# -# Note: -# This grid is larger than the HRRRX domain and thus cannot be initialized -# using the HRRRX. 
-# -#----------------------------------------------------------------------- -# - "CONUS_3km_GFDLgrid") - - __grid_gen_method__="GFDLgrid" - - __gfdlgrid_lon_t6_ctr__="-97.5" - __gfdlgrid_lat_t6_ctr__="38.5" - __gfdlgrid_stretch_fac__="1.5" - __gfdlgrid_num_cells__="768" - __gfdlgrid_refine_ratio__="3" - - num_margin_cells_T6_left="69" - __gfdlgrid_istart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_left + 1 )) - - num_margin_cells_T6_right="69" - __gfdlgrid_iend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_right )) - - num_margin_cells_T6_bottom="164" - __gfdlgrid_jstart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_bottom + 1 )) - - num_margin_cells_T6_top="164" - __gfdlgrid_jend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_top )) - - __gfdlgrid_use_num_cells_in_filenames__="TRUE" - - __dt_atmos__="${dt_atmos:-18}" - - __layout_x__="${layout_x:-30}" - __layout_y__="${layout_y:-22}" - __blocksize__="${blocksize:-35}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__=$(( 1*__layout_y__ )) - __wrtcmp_output_grid__="rotated_latlon" - __wrtcmp_cen_lon__="${__gfdlgrid_lon_t6_ctr__}" - __wrtcmp_cen_lat__="${__gfdlgrid_lat_t6_ctr__}" - __wrtcmp_lon_lwr_left__="-25.23144805" - __wrtcmp_lat_lwr_left__="-15.82130419" - __wrtcmp_lon_upr_rght__="25.23144805" - __wrtcmp_lat_upr_rght__="15.82130419" - __wrtcmp_dlon__="0.02665763" - __wrtcmp_dlat__="0.02665763" - fi - ;; -# -#----------------------------------------------------------------------- -# -# EMC's Alaska grid. -# -#----------------------------------------------------------------------- -# - "EMC_AK") - -# if [ "${grid_gen_method}" = "GFDLgrid" ]; then - -# Values from an EMC script. 
- -### rocoto items -# -#fcstnodes=68 -#bcnodes=11 -#postnodes=2 -#goespostnodes=5 -#goespostthrottle=6 -#sh=06 -#eh=18 -# -### namelist items -# -#task_layout_x=16 -#task_layout_y=48 -#npx=1345 -#npy=1153 -#target_lat=61.0 -#target_lon=-153.0 -# -### model config items -# -#write_groups=2 -#write_tasks_per_group=24 -#cen_lon=$target_lon -#cen_lat=$target_lat -#lon1=-18.0 -#lat1=-14.79 -#lon2=18.0 -#lat2=14.79 -#dlon=0.03 -#dlat=0.03 - -# __gfdlgrid_lon_t6_ctr__="-153.0" -# __gfdlgrid_lat_t6_ctr__="61.0" -# __gfdlgrid_stretch_fac__="1.0" # ??? -# __gfdlgrid_num_cells__="768" -# __gfdlgrid_refine_ratio__="3" # ??? -# -# num_margin_cells_T6_left="61" -# __gfdlgrid_istart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_left + 1 )) -# -# num_margin_cells_T6_right="67" -# __gfdlgrid_iend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_right )) -# -# num_margin_cells_T6_bottom="165" -# __gfdlgrid_jstart_of_rgnl_dom_on_t6g__=$(( num_margin_cells_T6_bottom + 1 )) -# -# num_margin_cells_T6_top="171" -# __gfdlgrid_jend_of_rgnl_dom_on_t6g__=$(( __gfdlgrid_num_cells__ - num_margin_cells_T6_top )) -# -# __gfdlgrid_use_num_cells_in_filenames__="TRUE" -# -# __dt_atmos__="${dt_atmos:-18}" -# -# __layout_x__="${layout_x:-16}" -# __layout_y__="${layout_y:-48}" -# __wrtcmp_write_groups__="2" -# __wrtcmp_write_tasks_per_group__="24" -# __blocksize__="${blocksize:-32}" -# -# elif [ "${grid_gen_method}" = "ESGgrid" ]; then - - __grid_gen_method__="ESGgrid" - -# Values taken from pre-generated files in /scratch4/NCEPDEV/fv3-cam/save/Benjamin.Blake/regional_workflow/fix/fix_sar -# With move to Hera, those files were lost; a backup can be found here: /scratch2/BMC/det/kavulich/fix/fix_sar - -# Longitude and latitude for center of domain - __esggrid_lon_ctr__="-153.0" - __esggrid_lat_ctr__="61.0" - -# Projected grid spacing in meters...in the static files (e.g. 
"C768_grid.tile7.nc"), the "dx" is actually the resolution -# of the supergrid, which is HALF of this dx - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - -# Number of x and y points for your domain (halo not included); -# Divide "supergrid" values from /scratch2/BMC/det/kavulich/fix/fix_sar/ak/C768_grid.tile7.halo4.nc by 2 and subtract 8 to eliminate halo - __esggrid_nx__="1344" # Supergrid value 2704 - __esggrid_ny__="1152" # Supergrid value 2320 - -# Rotation of the ESG grid in degrees. - __esggrid_pazi__="0.0" - -# Number of halo points for a wide grid (before trimming)...this should almost always be 6 for now -# Within the model we actually have a 4-point halo and a 3-point halo - __esggrid_wide_halo_width__="6" - -# Side note: FV3 is lagrangian and vertical coordinates are dynamically remapped during model integration -# 'ksplit' is the factor that determines the timestep for this process (divided - -# Physics timestep in seconds, actual dynamics timestep can be a subset of this. -# This is the time step for the largest atmosphere model loop. It corresponds to the frequency with which the -# top-level routine in the dynamics is called as well as the frequency with which the physics is called. -# -# Preliminary standard values: 18 for 3-km runs, 90 for 13-km runs per config_defaults.sh - - __dt_atmos__="${dt_atmos:-18}" - -#Factors for MPI decomposition. esggrid_nx must be divisible by layout_x, esggrid_ny must be divisible by layout_y - __layout_x__="${layout_x:-28}" - __layout_y__="${layout_y:-16}" - -#Take number of points on a tile (nx/lx*ny/ly), must divide by block size to get an integer. 
-#This integer must be small enough to fit into a processor's cache, so it is machine-dependent magic -# For Theia, must be ~40 or less -# Check setup.sh for more details - __blocksize__="${blocksize:-24}" - -#This section is all for the write component, which you need for output during model integration - if [ "$quilting" = "TRUE" ]; then -#Write component reserves MPI tasks for writing output. The number of "groups" is usually 1, but if you have a case where group 1 is not done writing before the next write step, you need group 2, etc. - __wrtcmp_write_groups__="1" -#Number of tasks per write group. Ny must be divisible my this number. layout_y is usually a good value - __wrtcmp_write_tasks_per_group__="24" -#lambert_conformal or rotated_latlon. lambert_conformal not well tested and probably doesn't work for our purposes - __wrtcmp_output_grid__="lambert_conformal" -#These should always be set the same as compute grid - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" -#Write component grid must always be <= compute grid (without haloes) - __wrtcmp_nx__="1344" - __wrtcmp_ny__="1152" -#Lower left latlon (southwest corner) - __wrtcmp_lon_lwr_left__="-177.0" - __wrtcmp_lat_lwr_left__="42.5" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# EMC's Hawaii grid. 
-# -#----------------------------------------------------------------------- -# - "EMC_HI") - - __grid_gen_method__="ESGgrid" - -# Values taken from pre-generated files in /scratch4/NCEPDEV/fv3-cam/save/Benjamin.Blake/regional_workflow/fix/fix_sar/hi/C768_grid.tile7.nc -# With move to Hera, those files were lost; a backup can be found here: /scratch2/BMC/det/kavulich/fix/fix_sar -# Longitude and latitude for center of domain - __esggrid_lon_ctr__="-157.0" - __esggrid_lat_ctr__="20.0" - -# Projected grid spacing in meters...in the static files (e.g. "C768_grid.tile7.nc"), the "dx" is actually the resolution -# of the supergrid, which is HALF of this dx (plus or minus some grid stretch factor) - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - -# Number of x and y points for your domain (halo not included); -# Divide "supergrid" values from /scratch2/BMC/det/kavulich/fix/fix_sar/hi/C768_grid.tile7.halo4.nc by 2 and subtract 8 to eliminate halo - __esggrid_nx__="432" # Supergrid value 880 - __esggrid_ny__="360" # Supergrid value 736 - -# Rotation of the ESG grid in degrees. - __esggrid_pazi__="0.0" - -# Number of halo points for a wide grid (before trimming)...this should almost always be 6 for now -# Within the model we actually have a 4-point halo and a 3-point halo - __esggrid_wide_halo_width__="6" - -# Side note: FV3 is lagrangian and vertical coordinates are dynamically remapped during model integration -# 'ksplit' is the factor that determines the timestep for this process (divided - -# Physics timestep in seconds, actual dynamics timestep can be a subset of this. -# This is the time step for the largest atmosphere model loop. It corresponds to the frequency with which the -# top-level routine in the dynamics is called as well as the frequency with which the physics is called. -# -# Preliminary standard values: 18 for 3-km runs, 90 for 13-km runs per config_defaults.sh - - __dt_atmos__="${dt_atmos:-18}" - -#Factors for MPI decomposition. 
esggrid_nx must be divisible by layout_x, esggrid_ny must be divisible by layout_y - __layout_x__="${layout_x:-8}" - __layout_y__="${layout_y:-8}" -#Take number of points on a tile (nx/lx*ny/ly), must divide by block size to get an integer. -#This integer must be small enough to fit into a processor's cache, so it is machine-dependent magic -# For Theia, must be ~40 or less -# Check setup.sh for more details - __blocksize__="${blocksize:-27}" - -#This section is all for the write component, which you need for output during model integration - if [ "$quilting" = "TRUE" ]; then -#Write component reserves MPI tasks for writing output. The number of "groups" is usually 1, but if you have a case where group 1 is not done writing before the next write step, you need group 2, etc. - __wrtcmp_write_groups__="1" -#Number of tasks per write group. Ny must be divisible my this number. layout_y is usually a good value - __wrtcmp_write_tasks_per_group__="8" -#lambert_conformal or rotated_latlon. lambert_conformal not well tested and probably doesn't work for our purposes - __wrtcmp_output_grid__="lambert_conformal" -#These should usually be set the same as compute grid - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" -#Write component grid should be close to the ESGgrid values unless you are doing something weird - __wrtcmp_nx__="420" - __wrtcmp_ny__="348" - -#Lower left latlon (southwest corner) - __wrtcmp_lon_lwr_left__="-162.8" - __wrtcmp_lat_lwr_left__="15.2" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# EMC's Puerto Rico grid. 
-# -#----------------------------------------------------------------------- -# - "EMC_PR") - - __grid_gen_method__="ESGgrid" - -# Values taken from pre-generated files in /scratch4/NCEPDEV/fv3-cam/save/Benjamin.Blake/regional_workflow/fix/fix_sar/pr/C768_grid.tile7.nc -# With move to Hera, those files were lost; a backup can be found here: /scratch2/BMC/det/kavulich/fix/fix_sar -# Longitude and latitude for center of domain - __esggrid_lon_ctr__="-69.0" - __esggrid_lat_ctr__="18.0" - -# Projected grid spacing in meters...in the static files (e.g. "C768_grid.tile7.nc"), the "dx" is actually the resolution -# of the supergrid, which is HALF of this dx (plus or minus some grid stretch factor) - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - -# Number of x and y points for your domain (halo not included); -# Divide "supergrid" values from /scratch2/BMC/det/kavulich/fix/fix_sar/pr/C768_grid.tile7.halo4.nc by 2 and subtract 8 to eliminate halo - __esggrid_nx__="576" # Supergrid value 1168 - __esggrid_ny__="432" # Supergrid value 880 - -# Rotation of the ESG grid in degrees. - __esggrid_pazi__="0.0" - -# Number of halo points for a wide grid (before trimming)...this should almost always be 6 for now -# Within the model we actually have a 4-point halo and a 3-point halo - __esggrid_wide_halo_width__="6" - -# Side note: FV3 is lagrangian and vertical coordinates are dynamically remapped during model integration -# 'ksplit' is the factor that determines the timestep for this process (divided - -# Physics timestep in seconds, actual dynamics timestep can be a subset of this. -# This is the time step for the largest atmosphere model loop. It corresponds to the frequency with which the -# top-level routine in the dynamics is called as well as the frequency with which the physics is called. -# -# Preliminary standard values: 18 for 3-km runs, 90 for 13-km runs per config_defaults.sh - - __dt_atmos__="${dt_atmos:-18}" - -#Factors for MPI decomposition. 
esggrid_nx must be divisible by layout_x, esggrid_ny must be divisible by layout_y - __layout_x__="${layout_x:-16}" - __layout_y__="${layout_y:-8}" - -#Take number of points on a tile (nx/lx*ny/ly), must divide by block size to get an integer. -#This integer must be small enough to fit into a processor's cache, so it is machine-dependent magic -# For Theia, must be ~40 or less -# Check setup.sh for more details - __blocksize__="${blocksize:-24}" - -#This section is all for the write component, which you need for output during model integration - if [ "$quilting" = "TRUE" ]; then -#Write component reserves MPI tasks for writing output. The number of "groups" is usually 1, but if you have a case where group 1 is not done writing before the next write step, you need group 2, etc. - __wrtcmp_write_groups__="1" -#Number of tasks per write group. Ny must be divisible my this number. layout_y is usually a good value - __wrtcmp_write_tasks_per_group__="24" -#lambert_conformal or rotated_latlon. lambert_conformal not well tested and probably doesn't work for our purposes - __wrtcmp_output_grid__="lambert_conformal" -#These should always be set the same as compute grid - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" -#Write component grid must always be <= compute grid (without haloes) - __wrtcmp_nx__="576" - __wrtcmp_ny__="432" -#Lower left latlon (southwest corner) - __wrtcmp_lon_lwr_left__="-77" - __wrtcmp_lat_lwr_left__="12" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# EMC's Guam grid. 
-# -#----------------------------------------------------------------------- -# - "EMC_GU") - - __grid_gen_method__="ESGgrid" - -# Values taken from pre-generated files in /scratch4/NCEPDEV/fv3-cam/save/Benjamin.Blake/regional_workflow/fix/fix_sar/guam/C768_grid.tile7.nc -# With move to Hera, those files were lost; a backup can be found here: /scratch2/BMC/det/kavulich/fix/fix_sar -# Longitude and latitude for center of domain - __esggrid_lon_ctr__="146.0" - __esggrid_lat_ctr__="15.0" - -# Projected grid spacing in meters...in the static files (e.g. "C768_grid.tile7.nc"), the "dx" is actually the resolution -# of the supergrid, which is HALF of this dx (plus or minus some grid stretch factor) - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - -# Number of x and y points for your domain (halo not included); -# Divide "supergrid" values from /scratch2/BMC/det/kavulich/fix/fix_sar/guam/C768_grid.tile7.halo4.nc by 2 and subtract 8 to eliminate halo - __esggrid_nx__="432" # Supergrid value 880 - __esggrid_ny__="360" # Supergrid value 736 - -# Rotation of the ESG grid in degrees. - __esggrid_pazi__="0.0" - -# Number of halo points for a wide grid (before trimming)...this should almost always be 6 for now -# Within the model we actually have a 4-point halo and a 3-point halo - __esggrid_wide_halo_width__="6" - -# Side note: FV3 is lagrangian and vertical coordinates are dynamically remapped during model integration -# 'ksplit' is the factor that determines the timestep for this process (divided - -# Physics timestep in seconds, actual dynamics timestep can be a subset of this. -# This is the time step for the largest atmosphere model loop. It corresponds to the frequency with which the -# top-level routine in the dynamics is called as well as the frequency with which the physics is called. -# -# Preliminary standard values: 18 for 3-km runs, 90 for 13-km runs per config_defaults.sh - - __dt_atmos__="${dt_atmos:-18}" - -#Factors for MPI decomposition. 
esggrid_nx must be divisible by layout_x, esggrid_ny must be divisible by layout_y - __layout_x__="${layout_x:-16}" - __layout_y__="${layout_y:-12}" -#Take number of points on a tile (nx/lx*ny/ly), must divide by block size to get an integer. -#This integer must be small enough to fit into a processor's cache, so it is machine-dependent magic -# For Theia, must be ~40 or less -# Check setup.sh for more details - __blocksize__="${blocksize:-27}" - -#This section is all for the write component, which you need for output during model integration - if [ "$quilting" = "TRUE" ]; then -#Write component reserves MPI tasks for writing output. The number of "groups" is usually 1, but if you have a case where group 1 is not done writing before the next write step, you need group 2, etc. - __wrtcmp_write_groups__="1" -#Number of tasks per write group. Ny must be divisible my this number. layout_y is usually a good value - __wrtcmp_write_tasks_per_group__="24" -#lambert_conformal or rotated_latlon. lambert_conformal not well tested and probably doesn't work for our purposes - __wrtcmp_output_grid__="lambert_conformal" -#These should always be set the same as compute grid - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" -#Write component grid must always be <= compute grid (without haloes) - __wrtcmp_nx__="420" - __wrtcmp_ny__="348" -#Lower left latlon (southwest corner) Used /scratch2/NCEPDEV/fv3-cam/Dusan.Jovic/dbrowse/fv3grid utility to find best value - __wrtcmp_lon_lwr_left__="140" - __wrtcmp_lat_lwr_left__="10" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# Emulation of the HAFS v0.A grid at 25 km. 
-# -#----------------------------------------------------------------------- -# - "GSL_HAFSV0.A_25km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-62.0" - __esggrid_lat_ctr__="22.0" - - __esggrid_delx__="25000.0" - __esggrid_dely__="25000.0" - - __esggrid_nx__="345" - __esggrid_ny__="230" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-300}" - - __layout_x__="${layout_x:-5}" - __layout_y__="${layout_y:-5}" - __blocksize__="${blocksize:-6}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="32" - __wrtcmp_output_grid__="regional_latlon" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="25.0" - __wrtcmp_lon_lwr_left__="-114.5" - __wrtcmp_lat_lwr_left__="-5.0" - __wrtcmp_lon_upr_rght__="-9.5" - __wrtcmp_lat_upr_rght__="55.0" - __wrtcmp_dlon__="0.25" - __wrtcmp_dlat__="0.25" - fi - ;; -# -#----------------------------------------------------------------------- -# -# Emulation of the HAFS v0.A grid at 13 km. 
-# -#----------------------------------------------------------------------- -# - "GSL_HAFSV0.A_13km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-62.0" - __esggrid_lat_ctr__="22.0" - - __esggrid_delx__="13000.0" - __esggrid_dely__="13000.0" - - __esggrid_nx__="665" - __esggrid_ny__="444" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-180}" - - __layout_x__="${layout_x:-19}" - __layout_y__="${layout_y:-12}" - __blocksize__="${blocksize:-35}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="32" - __wrtcmp_output_grid__="regional_latlon" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="25.0" - __wrtcmp_lon_lwr_left__="-114.5" - __wrtcmp_lat_lwr_left__="-5.0" - __wrtcmp_lon_upr_rght__="-9.5" - __wrtcmp_lat_upr_rght__="55.0" - __wrtcmp_dlon__="0.13" - __wrtcmp_dlat__="0.13" - fi - ;; -# -#----------------------------------------------------------------------- -# -# Emulation of the HAFS v0.A grid at 3 km. 
-# -#----------------------------------------------------------------------- -# - "GSL_HAFSV0.A_3km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-62.0" - __esggrid_lat_ctr__="22.0" - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="2880" - __esggrid_ny__="1920" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-40}" - - __layout_x__="${layout_x:-32}" - __layout_y__="${layout_y:-24}" - __blocksize__="${blocksize:-32}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="32" - __wrtcmp_output_grid__="regional_latlon" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="25.0" - __wrtcmp_lon_lwr_left__="-114.5" - __wrtcmp_lat_lwr_left__="-5.0" - __wrtcmp_lon_upr_rght__="-9.5" - __wrtcmp_lat_upr_rght__="55.0" - __wrtcmp_dlon__="0.03" - __wrtcmp_dlat__="0.03" - fi - ;; -# -#----------------------------------------------------------------------- -# -# 50-km HRRR Alaska grid. 
-# -#----------------------------------------------------------------------- -# - "GSD_HRRR_AK_50km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-163.5" - __esggrid_lat_ctr__="62.8" - - __esggrid_delx__="50000.0" - __esggrid_dely__="50000.0" - - __esggrid_nx__="74" - __esggrid_ny__="51" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-600}" - - __layout_x__="${layout_x:-2}" - __layout_y__="${layout_y:-3}" - __blocksize__="${blocksize:-37}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="1" - __wrtcmp_output_grid__="lambert_conformal" - __wrtcmp_cen_lon__="${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat1__="${__esggrid_lat_ctr__}" - __wrtcmp_stdlat2__="${__esggrid_lat_ctr__}" - __wrtcmp_nx__="70" - __wrtcmp_ny__="45" - __wrtcmp_lon_lwr_left__="172.0" - __wrtcmp_lat_lwr_left__="49.0" - __wrtcmp_dx__="${__esggrid_delx__}" - __wrtcmp_dy__="${__esggrid_dely__}" - fi - ;; -# -#----------------------------------------------------------------------- -# -# Emulation of GSD's RAP domain with ~13km cell size. 
-# -#----------------------------------------------------------------------- -# - "RRFS_NA_13km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__="-112.5" - __esggrid_lat_ctr__="55.0" - - __esggrid_delx__="13000.0" - __esggrid_dely__="13000.0" - - __esggrid_nx__="912" - __esggrid_ny__="623" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-50}" - - __layout_x__="${layout_x:-16}" - __layout_y__="${layout_y:-16}" - __blocksize__="${blocksize:-30}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="16" - __wrtcmp_output_grid__="rotated_latlon" - __wrtcmp_cen_lon__="-113.0" #"${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="55.0" #"${__esggrid_lat_ctr__}" - __wrtcmp_lon_lwr_left__="-61.0" - __wrtcmp_lat_lwr_left__="-37.0" - __wrtcmp_lon_upr_rght__="61.0" - __wrtcmp_lat_upr_rght__="37.0" - __wrtcmp_dlon__=$( printf "%.9f" $( bc -l <<< "(${__esggrid_delx__}/${radius_Earth})*${degs_per_radian}" ) ) - __wrtcmp_dlat__=$( printf "%.9f" $( bc -l <<< "(${__esggrid_dely__}/${radius_Earth})*${degs_per_radian}" ) ) - fi - ;; -# -#----------------------------------------------------------------------- -# -# Future operational RRFS domain with ~3km cell size. 
-# -#----------------------------------------------------------------------- -# - "RRFS_NA_3km") - - __grid_gen_method__="ESGgrid" - - __esggrid_lon_ctr__=-112.5 - __esggrid_lat_ctr__=55.0 - - __esggrid_delx__="3000.0" - __esggrid_dely__="3000.0" - - __esggrid_nx__="3950" - __esggrid_ny__="2700" - - __esggrid_pazi__="0.0" - - __esggrid_wide_halo_width__="6" - - __dt_atmos__="${dt_atmos:-36}" - - __layout_x__="${layout_x:-20}" # 40 - EMC operational configuration - __layout_y__="${layout_y:-35}" # 45 - EMC operational configuration - __blocksize__="${blocksize:-28}" - - if [ "$quilting" = "TRUE" ]; then - __wrtcmp_write_groups__="1" - __wrtcmp_write_tasks_per_group__="144" - __wrtcmp_output_grid__="rotated_latlon" - __wrtcmp_cen_lon__="-113.0" #"${__esggrid_lon_ctr__}" - __wrtcmp_cen_lat__="55.0" #"${__esggrid_lat_ctr__}" - __wrtcmp_lon_lwr_left__="-61.0" - __wrtcmp_lat_lwr_left__="-37.0" - __wrtcmp_lon_upr_rght__="61.0" - __wrtcmp_lat_upr_rght__="37.0" - __wrtcmp_dlon__="0.025" #$( printf "%.9f" $( bc -l <<< "(${__esggrid_delx__}/${radius_Earth})*${degs_per_radian}" ) ) - __wrtcmp_dlat__="0.025" #$( printf "%.9f" $( bc -l <<< "(${__esggrid_dely__}/${radius_Earth})*${degs_per_radian}" ) ) - fi - ;; - - esac -# -#----------------------------------------------------------------------- -# -# Use the printf utility with the -v flag to set this function's output -# variables. Note that each of these is set only if the corresponding -# input variable specifying the name to use for the output variable is -# not empty. -# -#----------------------------------------------------------------------- -# - if [ ! -z "${outvarname_grid_gen_method}" ]; then - printf -v ${outvarname_grid_gen_method} "%s" "${__grid_gen_method__}" - fi - - if [ ! -z "${outvarname_esggrid_lon_ctr}" ]; then - printf -v ${outvarname_esggrid_lon_ctr} "%s" "${__esggrid_lon_ctr__}" - fi - - if [ ! 
-z "${outvarname_esggrid_lat_ctr}" ]; then - printf -v ${outvarname_esggrid_lat_ctr} "%s" "${__esggrid_lat_ctr__}" - fi - - if [ ! -z "${outvarname_esggrid_delx}" ]; then - printf -v ${outvarname_esggrid_delx} "%s" "${__esggrid_delx__}" - fi - - if [ ! -z "${outvarname_esggrid_dely}" ]; then - printf -v ${outvarname_esggrid_dely} "%s" "${__esggrid_dely__}" - fi - - if [ ! -z "${outvarname_esggrid_nx}" ]; then - printf -v ${outvarname_esggrid_nx} "%s" "${__esggrid_nx__}" - fi - - if [ ! -z "${outvarname_esggrid_ny}" ]; then - printf -v ${outvarname_esggrid_ny} "%s" "${__esggrid_ny__}" - fi - - if [ ! -z "${outvarname_esggrid_pazi}" ]; then - printf -v ${outvarname_esggrid_pazi} "%s" "${__esggrid_pazi__}" - fi - - if [ ! -z "${outvarname_esggrid_wide_halo_width}" ]; then - printf -v ${outvarname_esggrid_wide_halo_width} "%s" "${__esggrid_wide_halo_width__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_lon_t6_ctr}" ]; then - printf -v ${outvarname_gfdlgrid_lon_t6_ctr} "%s" "${__gfdlgrid_lon_t6_ctr__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_lat_t6_ctr}" ]; then - printf -v ${outvarname_gfdlgrid_lat_t6_ctr} "%s" "${__gfdlgrid_lat_t6_ctr__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_stretch_fac}" ]; then - printf -v ${outvarname_gfdlgrid_stretch_fac} "%s" "${__gfdlgrid_stretch_fac__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_num_cells}" ]; then - printf -v ${outvarname_gfdlgrid_num_cells} "%s" "${__gfdlgrid_num_cells__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_refine_ratio}" ]; then - printf -v ${outvarname_gfdlgrid_refine_ratio} "%s" "${__gfdlgrid_refine_ratio__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_istart_of_rgnl_dom_on_t6g}" ]; then - printf -v ${outvarname_gfdlgrid_istart_of_rgnl_dom_on_t6g} "%s" "${__gfdlgrid_istart_of_rgnl_dom_on_t6g__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_iend_of_rgnl_dom_on_t6g}" ]; then - printf -v ${outvarname_gfdlgrid_iend_of_rgnl_dom_on_t6g} "%s" "${__gfdlgrid_iend_of_rgnl_dom_on_t6g__}" - fi - - if [ ! 
-z "${outvarname_gfdlgrid_jstart_of_rgnl_dom_on_t6g}" ]; then - printf -v ${outvarname_gfdlgrid_jstart_of_rgnl_dom_on_t6g} "%s" "${__gfdlgrid_jstart_of_rgnl_dom_on_t6g__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_jend_of_rgnl_dom_on_t6g}" ]; then - printf -v ${outvarname_gfdlgrid_jend_of_rgnl_dom_on_t6g} "%s" "${__gfdlgrid_jend_of_rgnl_dom_on_t6g__}" - fi - - if [ ! -z "${outvarname_gfdlgrid_use_num_cells_in_filenames}" ]; then - printf -v ${outvarname_gfdlgrid_use_num_cells_in_filenames} "%s" "${__gfdlgrid_use_num_cells_in_filenames__}" - fi - - if [ ! -z "${outvarname_dt_atmos}" ]; then - printf -v ${outvarname_dt_atmos} "%s" "${__dt_atmos__}" - fi - - if [ ! -z "${outvarname_layout_x}" ]; then - printf -v ${outvarname_layout_x} "%s" "${__layout_x__}" - fi - - if [ ! -z "${outvarname_layout_y}" ]; then - printf -v ${outvarname_layout_y} "%s" "${__layout_y__}" - fi - - if [ ! -z "${outvarname_blocksize}" ]; then - printf -v ${outvarname_blocksize} "%s" "${__blocksize__}" - fi - - if [ ! -z "${outvarname_wrtcmp_write_groups}" ]; then - printf -v ${outvarname_wrtcmp_write_groups} "%s" "${__wrtcmp_write_groups__}" - fi - - if [ ! -z "${outvarname_wrtcmp_write_tasks_per_group}" ]; then - printf -v ${outvarname_wrtcmp_write_tasks_per_group} "%s" "${__wrtcmp_write_tasks_per_group__}" - fi - - if [ ! -z "${outvarname_wrtcmp_output_grid}" ]; then - printf -v ${outvarname_wrtcmp_output_grid} "%s" "${__wrtcmp_output_grid__}" - fi - - if [ ! -z "${outvarname_wrtcmp_cen_lon}" ]; then - printf -v ${outvarname_wrtcmp_cen_lon} "%s" "${__wrtcmp_cen_lon__}" - fi - - if [ ! -z "${outvarname_wrtcmp_cen_lat}" ]; then - printf -v ${outvarname_wrtcmp_cen_lat} "%s" "${__wrtcmp_cen_lat__}" - fi - - if [ ! -z "${outvarname_wrtcmp_stdlat1}" ]; then - printf -v ${outvarname_wrtcmp_stdlat1} "%s" "${__wrtcmp_stdlat1__}" - fi - - if [ ! -z "${outvarname_wrtcmp_stdlat2}" ]; then - printf -v ${outvarname_wrtcmp_stdlat2} "%s" "${__wrtcmp_stdlat2__}" - fi - - if [ ! 
-z "${outvarname_wrtcmp_nx}" ]; then - printf -v ${outvarname_wrtcmp_nx} "%s" "${__wrtcmp_nx__}" - fi - - if [ ! -z "${outvarname_wrtcmp_ny}" ]; then - printf -v ${outvarname_wrtcmp_ny} "%s" "${__wrtcmp_ny__}" - fi - - if [ ! -z "${outvarname_wrtcmp_lon_lwr_left}" ]; then - printf -v ${outvarname_wrtcmp_lon_lwr_left} "%s" "${__wrtcmp_lon_lwr_left__}" - fi - - if [ ! -z "${outvarname_wrtcmp_lat_lwr_left}" ]; then - printf -v ${outvarname_wrtcmp_lat_lwr_left} "%s" "${__wrtcmp_lat_lwr_left__}" - fi - - if [ ! -z "${outvarname_wrtcmp_lon_upr_rght}" ]; then - printf -v ${outvarname_wrtcmp_lon_upr_rght} "%s" "${__wrtcmp_lon_upr_rght__}" - fi - - if [ ! -z "${outvarname_wrtcmp_lat_upr_rght}" ]; then - printf -v ${outvarname_wrtcmp_lat_upr_rght} "%s" "${__wrtcmp_lat_upr_rght__}" - fi - - if [ ! -z "${outvarname_wrtcmp_dx}" ]; then - printf -v ${outvarname_wrtcmp_dx} "%s" "${__wrtcmp_dx__}" - fi - - if [ ! -z "${outvarname_wrtcmp_dy}" ]; then - printf -v ${outvarname_wrtcmp_dy} "%s" "${__wrtcmp_dy__}" - fi - - if [ ! -z "${outvarname_wrtcmp_dlon}" ]; then - printf -v ${outvarname_wrtcmp_dlon} "%s" "${__wrtcmp_dlon__}" - fi - - if [ ! -z "${outvarname_wrtcmp_dlat}" ]; then - printf -v ${outvarname_wrtcmp_dlat} "%s" "${__wrtcmp_dlat__}" - fi -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/func- -# tion. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} diff --git a/ush/set_thompson_mp_fix_files.sh b/ush/set_thompson_mp_fix_files.sh deleted file mode 100644 index 7bad26c2bb..0000000000 --- a/ush/set_thompson_mp_fix_files.sh +++ /dev/null @@ -1,192 +0,0 @@ -# -#----------------------------------------------------------------------- -# -# This file defines a function that first checks whether the Thompson -# microphysics parameterization is being called by the selected physics -# suite. 
If not, it sets the output variable whose name is specified by -# output_varname_sdf_uses_thompson_mp to "FALSE" and exits. If so, it -# sets this variable to "TRUE" and modifies the workflow arrays -# FIXgsm_FILES_TO_COPY_TO_FIXam and CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING -# to ensure that fixed files needed by the Thompson microphysics -# parameterization are copied to the FIXam directory and that appropriate -# symlinks to these files are created in the run directories. -# -#----------------------------------------------------------------------- -# -function set_thompson_mp_fix_files() { -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# - { save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# - local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) - local scrfunc_fn=$( basename "${scrfunc_fp}" ) - local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# - local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# Specify the set of valid argument names for this script/function. Then -# process the arguments provided to this script/function (which should -# consist of a set of name-value pairs of the form arg1="value1", etc). 
-# -#----------------------------------------------------------------------- -# - local valid_args=( \ - "ccpp_phys_suite_fp" \ - "thompson_mp_climo_fn" \ - "output_varname_sdf_uses_thompson_mp" \ - ) - process_args valid_args "$@" -# -#----------------------------------------------------------------------- -# -# For debugging purposes, print out values of arguments passed to this -# script. Note that these will be printed out only if VERBOSE is set to -# TRUE. -# -#----------------------------------------------------------------------- -# - print_input_args valid_args -# -#----------------------------------------------------------------------- -# -# Declare local variables. -# -#----------------------------------------------------------------------- -# - local thompson_mp_name \ - regex_search \ - thompson_mp_name_or_null \ - sdf_uses_thompson_mp \ - thompson_mp_fix_files \ - num_files \ - mapping \ - msg -# -#----------------------------------------------------------------------- -# -# Check the suite definition file to see whether the Thompson microphysics -# parameterization is being used. -# -#----------------------------------------------------------------------- -# - thompson_mp_name="mp_thompson" - regex_search="^[ ]*(${thompson_mp_name})<\/scheme>[ ]*$" - thompson_mp_name_or_null=$( $SED -r -n -e "s/${regex_search}/\1/p" "${ccpp_phys_suite_fp}" ) - - if [ "${thompson_mp_name_or_null}" = "${thompson_mp_name}" ]; then - sdf_uses_thompson_mp="TRUE" - elif [ -z "${thompson_mp_name_or_null}" ]; then - sdf_uses_thompson_mp="FALSE" - else - print_err_msg_exit "\ -Unexpected value returned for thompson_mp_name_or_null: - thompson_mp_name_or_null = \"${thompson_mp_name_or_null}\" -This variable should be set to either \"${thompson_mp_name}\" or an empty -string." - fi -# -#----------------------------------------------------------------------- -# -# If the Thompson microphysics parameterization is being used, then... 
-# -#----------------------------------------------------------------------- -# - if [ "${sdf_uses_thompson_mp}" = "TRUE" ]; then -# -#----------------------------------------------------------------------- -# -# Append the names of the fixed files needed by the Thompson microphysics -# parameterization to the workflow array FIXgsm_FILES_TO_COPY_TO_FIXam, -# and append to the workflow array CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING -# the mappings between these files and the names of the corresponding -# symlinks that need to be created in the run directories. -# -#----------------------------------------------------------------------- -# - thompson_mp_fix_files=( \ - "CCN_ACTIVATE.BIN" \ - "freezeH2O.dat" \ - "qr_acr_qg.dat" \ - "qr_acr_qs.dat" \ - "qr_acr_qgV2.dat" \ - "qr_acr_qsV2.dat" \ - ) - - if [ "${EXTRN_MDL_NAME_ICS}" != "HRRR" -a "${EXTRN_MDL_NAME_ICS}" != "RAP" ] || \ - [ "${EXTRN_MDL_NAME_LBCS}" != "HRRR" -a "${EXTRN_MDL_NAME_LBCS}" != "RAP" ]; then - thompson_mp_fix_files+=( "${thompson_mp_climo_fn}" ) - fi - - FIXgsm_FILES_TO_COPY_TO_FIXam+=( "${thompson_mp_fix_files[@]}" ) - - num_files=${#thompson_mp_fix_files[@]} - for (( i=0; i<${num_files}; i++ )); do - mapping="${thompson_mp_fix_files[i]} | ${thompson_mp_fix_files[i]}" - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING+=( "${mapping}" ) - done - - msg=" -Since the Thompson microphysics parameterization is being used by this -physics suite (CCPP_PHYS_SUITE), the names of the fixed files needed by -this scheme have been appended to the array FIXgsm_FILES_TO_COPY_TO_FIXam, -and the mappings between these files and the symlinks that need to be -created in the cycle directories have been appended to the array -CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING. 
After these modifications, the -values of these parameters are as follows: - - CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\" - - FIXgsm_FILES_TO_COPY_TO_FIXam = ( \\ -" - msg="$msg"$( printf "\"%s\" \\\\\n" "${FIXgsm_FILES_TO_COPY_TO_FIXam[@]}" ) - msg="$msg"$( printf "\n)" ) - msg="$msg - - CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING = ( \\ -" - msg="$msg"$( printf "\"%s\" \\\\\n" "${CYCLEDIR_LINKS_TO_FIXam_FILES_MAPPING[@]}" ) - msg="$msg"$( printf "\n)" ) - print_info_msg "$msg" - - fi -# -#----------------------------------------------------------------------- -# -# Set output variables. -# -#----------------------------------------------------------------------- -# - eval ${output_varname_sdf_uses_thompson_mp}="${sdf_uses_thompson_mp}" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the beginning of this script/function. -# -#----------------------------------------------------------------------- -# - { restore_shell_opts; } > /dev/null 2>&1 - -} - diff --git a/ush/setup.py b/ush/setup.py index 06bf3dd535..7a3cb032f0 100644 --- a/ush/setup.py +++ b/ush/setup.py @@ -7,7 +7,6 @@ from python_utils import cd_vrfy, mkdir_vrfy, rm_vrfy, check_var_valid_value,\ lowercase,uppercase,check_for_preexist_dir_file,\ - list_to_str, type_to_str, \ import_vars, export_vars, get_env_var, print_info_msg,\ print_err_msg_exit, load_config_file, cfg_to_shell_str,\ load_shell_config, load_ini_config, get_ini_value @@ -58,8 +57,8 @@ def setup(): # #----------------------------------------------------------------------- # - EXPT_DEFAULT_CONFIG_FN="config_defaults.yaml" - cfg_d = load_config_file(EXPT_DEFAULT_CONFIG_FN) + EXPT_DEFAULT_CONFIG_FN="config_defaults.sh" + cfg_d = load_config_file(os.path.join(ushdir,EXPT_DEFAULT_CONFIG_FN)) import_vars(dictionary=cfg_d) # #----------------------------------------------------------------------- @@ -91,6 +90,14 @@ def setup(): not defined in the default configuration file 
{EXPT_DEFAULT_CONFIG_FN}''') import_vars(dictionary=cfg_u) + # + #----------------------------------------------------------------------- + # Source constants.sh and save its contents to a variable for later + #----------------------------------------------------------------------- + # + cfg_c=load_config_file(os.path.join(ushdir,CONSTANTS_FN)) + const_lines=cfg_to_shell_str(cfg_c) + import_vars(dictionary=cfg_c) # #----------------------------------------------------------------------- # @@ -99,13 +106,12 @@ def setup(): # #----------------------------------------------------------------------- # - # export env vars before calling another module export_vars() if PREDEF_GRID_NAME: set_predef_grid_params() - + import_vars() # @@ -119,7 +125,7 @@ def setup(): if DEBUG and not VERBOSE: print_info_msg(''' Resetting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"...''') - VERBOSE=False + VERBOSE=True # #----------------------------------------------------------------------- @@ -924,17 +930,12 @@ def setup(): NEMS_CONFIG_FN = "nems.configure" #---------------------------------- - if DATA_TABLE_TMPL_FN is None: - DATA_TABLE_TMPL_FN = DATA_TABLE_FN - if DIAG_TABLE_TMPL_FN is None: - DIAG_TABLE_TMPL_FN = f"{DIAG_TABLE_FN}{dot_ccpp_phys_suite_or_null}" - if FIELD_TABLE_TMPL_FN is None: - FIELD_TABLE_TMPL_FN = f"{FIELD_TABLE_FN}{dot_ccpp_phys_suite_or_null}" - if MODEL_CONFIG_TMPL_FN is None: - MODEL_CONFIG_TMPL_FN = MODEL_CONFIG_FN - if NEMS_CONFIG_TMPL_FN is None: - NEMS_CONFIG_TMPL_FN = NEMS_CONFIG_FN - + DATA_TABLE_TMPL_FN = DATA_TABLE_TMPL_FN or DATA_TABLE_FN + DIAG_TABLE_TMPL_FN = f"{DIAG_TABLE_TMPL_FN or DIAG_TABLE_FN}{dot_ccpp_phys_suite_or_null}" + FIELD_TABLE_TMPL_FN = f"{FIELD_TABLE_TMPL_FN or FIELD_TABLE_FN}{dot_ccpp_phys_suite_or_null}" + MODEL_CONFIG_TMPL_FN = MODEL_CONFIG_TMPL_FN or MODEL_CONFIG_FN + NEMS_CONFIG_TMPL_FN = NEMS_CONFIG_TMPL_FN or NEMS_CONFIG_FN + DATA_TABLE_TMPL_FP = os.path.join(TEMPLATE_DIR,DATA_TABLE_TMPL_FN) DIAG_TABLE_TMPL_FP = 
os.path.join(TEMPLATE_DIR,DIAG_TABLE_TMPL_FN) FIELD_TABLE_TMPL_FP = os.path.join(TEMPLATE_DIR,FIELD_TABLE_TMPL_FN) @@ -1414,55 +1415,11 @@ def setup(): set_extrn_mdl_params() - IMPORTS = ["EXTRN_MDL_SYSBASEDIR_ICS", "EXTRN_MDL_SYSBASEDIR_LBCS", "EXTRN_MDL_LBCS_OFFSET_HRS"] + IMPORTS = ["EXTRN_MDL_LBCS_OFFSET_HRS"] import_vars(env_vars=IMPORTS) # #----------------------------------------------------------------------- # - # Any regional model must be supplied lateral boundary conditions (in - # addition to initial conditions) to be able to perform a forecast. In - # the FV3-LAM model, these boundary conditions (BCs) are supplied using a - # "halo" of grid cells around the regional domain that extend beyond the - # boundary of the domain. The model is formulated such that along with - # files containing these BCs, it needs as input the following files (in - # NetCDF format): - # - # 1) A grid file that includes a halo of 3 cells beyond the boundary of - # the domain. - # 2) A grid file that includes a halo of 4 cells beyond the boundary of - # the domain. - # 3) A (filtered) orography file without a halo, i.e. a halo of width - # 0 cells. - # 4) A (filtered) orography file that includes a halo of 4 cells beyond - # the boundary of the domain. - # - # Note that the regional grid is referred to as "tile 7" in the code. - # We will let: - # - # * NH0 denote the width (in units of number of cells on tile 7) of - # the 0-cell-wide halo, i.e. NH0 = 0; - # - # * NH3 denote the width (in units of number of cells on tile 7) of - # the 3-cell-wide halo, i.e. NH3 = 3; and - # - # * NH4 denote the width (in units of number of cells on tile 7) of - # the 4-cell-wide halo, i.e. NH4 = 4. - # - # We define these variables next. 
- # - #----------------------------------------------------------------------- - # - global NH0,NH3,NH4 - NH0=0 - NH3=3 - NH4=4 - - # export env vars - EXPORTS = ["NH0","NH3","NH4"] - export_vars(env_vars = EXPORTS) - # - #----------------------------------------------------------------------- - # # Set parameters according to the type of horizontal grid generation # method specified. First consider GFDL's global-parent-grid based # method. @@ -1490,7 +1447,7 @@ def setup(): set_gridparams_GFDLgrid( \ lon_of_t6_ctr=GFDLgrid_LON_T6_CTR, \ lat_of_t6_ctr=GFDLgrid_LAT_T6_CTR, \ - res_of_t6g=GFDLgrid_RES, \ + res_of_t6g=GFDLgrid_NUM_CELLS, \ stretch_factor=GFDLgrid_STRETCH_FAC, \ refine_ratio_t6g_to_t7g=GFDLgrid_REFINE_RATIO, \ istart_of_t7_on_t6g=GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G, \ @@ -1779,11 +1736,26 @@ def setup(): global GLOBAL_VAR_DEFNS_FP GLOBAL_VAR_DEFNS_FP=os.path.join(EXPTDIR,GLOBAL_VAR_DEFNS_FN) all_lines=cfg_to_shell_str(cfg_d) + with open(GLOBAL_VAR_DEFNS_FP,'w') as f: msg = f""" # + # #----------------------------------------------------------------------- #----------------------------------------------------------------------- # Section 1: + # This section contains definitions of the various constants defined in + # the file {CONSTANTS_FN}. + #----------------------------------------------------------------------- + #----------------------------------------------------------------------- + # + """ + f.write(dedent(msg)) + f.write(const_lines) + + msg = f""" # + #----------------------------------------------------------------------- + #----------------------------------------------------------------------- + # Section 2: # This section contains (most of) the primary experiment variables, i.e. 
# those variables that are defined in the default configuration file # (config_defaults.sh) and that can be reset via the user-specified @@ -1839,7 +1811,11 @@ def setup(): #----------------------------------------------------------------------- #----------------------------------------------------------------------- # + """ + with open(GLOBAL_VAR_DEFNS_FP,'a') as f: + f.write(dedent(msg)) + settings = { # #----------------------------------------------------------------------- # @@ -1849,9 +1825,9 @@ def setup(): # #----------------------------------------------------------------------- # - WFLOW_LAUNCH_SCRIPT_FP='{WFLOW_LAUNCH_SCRIPT_FP}' - WFLOW_LAUNCH_LOG_FP='{WFLOW_LAUNCH_LOG_FP}' - CRONTAB_LINE='{CRONTAB_LINE}' + 'WFLOW_LAUNCH_SCRIPT_FP': WFLOW_LAUNCH_SCRIPT_FP, + 'WFLOW_LAUNCH_LOG_FP': WFLOW_LAUNCH_LOG_FP, + 'CRONTAB_LINE': CRONTAB_LINE, # #----------------------------------------------------------------------- # @@ -1859,44 +1835,44 @@ def setup(): # #----------------------------------------------------------------------- # - SR_WX_APP_TOP_DIR='{SR_WX_APP_TOP_DIR}' - HOMErrfs='{HOMErrfs}' - USHDIR='{USHDIR}' - SCRIPTSDIR='{SCRIPTSDIR}' - JOBSDIR='{JOBSDIR}' - SORCDIR='{SORCDIR}' - SRC_DIR='{SRC_DIR}' - PARMDIR='{PARMDIR}' - MODULES_DIR='{MODULES_DIR}' - EXECDIR='{EXECDIR}' - FIXam='{FIXam}' - FIXclim='{FIXclim}' - FIXLAM='{FIXLAM}' - FIXgsm='{FIXgsm}' - FIXaer='{FIXaer}' - FIXlut='{FIXlut}' - COMROOT='{COMROOT}' - COMOUT_BASEDIR='{COMOUT_BASEDIR}' - TEMPLATE_DIR='{TEMPLATE_DIR}' - VX_CONFIG_DIR='{VX_CONFIG_DIR}' - METPLUS_CONF='{METPLUS_CONF}' - MET_CONFIG='{MET_CONFIG}' - UFS_WTHR_MDL_DIR='{UFS_WTHR_MDL_DIR}' - UFS_UTILS_DIR='{UFS_UTILS_DIR}' - SFC_CLIMO_INPUT_DIR='{SFC_CLIMO_INPUT_DIR}' - TOPO_DIR='{TOPO_DIR}' - UPP_DIR='{UPP_DIR}' + 'SR_WX_APP_TOP_DIR': SR_WX_APP_TOP_DIR, + 'HOMErrfs': HOMErrfs, + 'USHDIR': USHDIR, + 'SCRIPTSDIR': SCRIPTSDIR, + 'JOBSDIR': JOBSDIR, + 'SORCDIR': SORCDIR, + 'SRC_DIR': SRC_DIR, + 'PARMDIR': PARMDIR, + 'MODULES_DIR': 
MODULES_DIR, + 'EXECDIR': EXECDIR, + 'FIXam': FIXam, + 'FIXclim': FIXclim, + 'FIXLAM': FIXLAM, + 'FIXgsm': FIXgsm, + 'FIXaer': FIXaer, + 'FIXlut': FIXlut, + 'COMROOT': COMROOT, + 'COMOUT_BASEDIR': COMOUT_BASEDIR, + 'TEMPLATE_DIR': TEMPLATE_DIR, + 'VX_CONFIG_DIR': VX_CONFIG_DIR, + 'METPLUS_CONF': METPLUS_CONF, + 'MET_CONFIG': MET_CONFIG, + 'UFS_WTHR_MDL_DIR': UFS_WTHR_MDL_DIR, + 'UFS_UTILS_DIR': UFS_UTILS_DIR, + 'SFC_CLIMO_INPUT_DIR': SFC_CLIMO_INPUT_DIR, + 'TOPO_DIR': TOPO_DIR, + 'UPP_DIR': UPP_DIR, - EXPTDIR='{EXPTDIR}' - LOGDIR='{LOGDIR}' - CYCLE_BASEDIR='{CYCLE_BASEDIR}' - GRID_DIR='{GRID_DIR}' - OROG_DIR='{OROG_DIR}' - SFC_CLIMO_DIR='{SFC_CLIMO_DIR}' + 'EXPTDIR': EXPTDIR, + 'LOGDIR': LOGDIR, + 'CYCLE_BASEDIR': CYCLE_BASEDIR, + 'GRID_DIR': GRID_DIR, + 'OROG_DIR': OROG_DIR, + 'SFC_CLIMO_DIR': SFC_CLIMO_DIR, - NDIGITS_ENSMEM_NAMES='{NDIGITS_ENSMEM_NAMES}' - ENSMEM_NAMES={list_to_str(ENSMEM_NAMES)} - FV3_NML_ENSMEM_FPS={list_to_str(FV3_NML_ENSMEM_FPS)} + 'NDIGITS_ENSMEM_NAMES': NDIGITS_ENSMEM_NAMES, + 'ENSMEM_NAMES': ENSMEM_NAMES, + 'FV3_NML_ENSMEM_FPS': FV3_NML_ENSMEM_FPS, # #----------------------------------------------------------------------- # @@ -1904,49 +1880,49 @@ def setup(): # #----------------------------------------------------------------------- # - GLOBAL_VAR_DEFNS_FP='{GLOBAL_VAR_DEFNS_FP}' + 'GLOBAL_VAR_DEFNS_FP': GLOBAL_VAR_DEFNS_FP, - DATA_TABLE_FN='{DATA_TABLE_FN}' - DIAG_TABLE_FN='{DIAG_TABLE_FN}' - FIELD_TABLE_FN='{FIELD_TABLE_FN}' - MODEL_CONFIG_FN='{MODEL_CONFIG_FN}' - NEMS_CONFIG_FN='{NEMS_CONFIG_FN}' - - DATA_TABLE_TMPL_FN='{DATA_TABLE_TMPL_FN}' - DIAG_TABLE_TMPL_FN='{DIAG_TABLE_TMPL_FN}' - FIELD_TABLE_TMPL_FN='{FIELD_TABLE_TMPL_FN}' - MODEL_CONFIG_TMPL_FN='{MODEL_CONFIG_TMPL_FN}' - NEMS_CONFIG_TMPL_FN='{NEMS_CONFIG_TMPL_FN}' + 'DATA_TABLE_FN': DATA_TABLE_FN, + 'DIAG_TABLE_FN': DIAG_TABLE_FN, + 'FIELD_TABLE_FN': FIELD_TABLE_FN, + 'MODEL_CONFIG_FN': MODEL_CONFIG_FN, + 'NEMS_CONFIG_FN': NEMS_CONFIG_FN, + + 'DATA_TABLE_TMPL_FN': 
DATA_TABLE_TMPL_FN, + 'DIAG_TABLE_TMPL_FN': DIAG_TABLE_TMPL_FN, + 'FIELD_TABLE_TMPL_FN': FIELD_TABLE_TMPL_FN, + 'MODEL_CONFIG_TMPL_FN': MODEL_CONFIG_TMPL_FN, + 'NEMS_CONFIG_TMPL_FN': NEMS_CONFIG_TMPL_FN, - DATA_TABLE_TMPL_FP='{DATA_TABLE_TMPL_FP}' - DIAG_TABLE_TMPL_FP='{DIAG_TABLE_TMPL_FP}' - FIELD_TABLE_TMPL_FP='{FIELD_TABLE_TMPL_FP}' - FV3_NML_BASE_SUITE_FP='{FV3_NML_BASE_SUITE_FP}' - FV3_NML_YAML_CONFIG_FP='{FV3_NML_YAML_CONFIG_FP}' - FV3_NML_BASE_ENS_FP='{FV3_NML_BASE_ENS_FP}' - MODEL_CONFIG_TMPL_FP='{MODEL_CONFIG_TMPL_FP}' - NEMS_CONFIG_TMPL_FP='{NEMS_CONFIG_TMPL_FP}' + 'DATA_TABLE_TMPL_FP': DATA_TABLE_TMPL_FP, + 'DIAG_TABLE_TMPL_FP': DIAG_TABLE_TMPL_FP, + 'FIELD_TABLE_TMPL_FP': FIELD_TABLE_TMPL_FP, + 'FV3_NML_BASE_SUITE_FP': FV3_NML_BASE_SUITE_FP, + 'FV3_NML_YAML_CONFIG_FP': FV3_NML_YAML_CONFIG_FP, + 'FV3_NML_BASE_ENS_FP': FV3_NML_BASE_ENS_FP, + 'MODEL_CONFIG_TMPL_FP': MODEL_CONFIG_TMPL_FP, + 'NEMS_CONFIG_TMPL_FP': NEMS_CONFIG_TMPL_FP, - CCPP_PHYS_SUITE_FN='{CCPP_PHYS_SUITE_FN}' - CCPP_PHYS_SUITE_IN_CCPP_FP='{CCPP_PHYS_SUITE_IN_CCPP_FP}' - CCPP_PHYS_SUITE_FP='{CCPP_PHYS_SUITE_FP}' + 'CCPP_PHYS_SUITE_FN': CCPP_PHYS_SUITE_FN, + 'CCPP_PHYS_SUITE_IN_CCPP_FP': CCPP_PHYS_SUITE_IN_CCPP_FP, + 'CCPP_PHYS_SUITE_FP': CCPP_PHYS_SUITE_FP, - FIELD_DICT_FN='{FIELD_DICT_FN}' - FIELD_DICT_IN_UWM_FP='{FIELD_DICT_IN_UWM_FP}' - FIELD_DICT_FP='{FIELD_DICT_FP}' + 'FIELD_DICT_FN': FIELD_DICT_FN, + 'FIELD_DICT_IN_UWM_FP': FIELD_DICT_IN_UWM_FP, + 'FIELD_DICT_FP': FIELD_DICT_FP, - DATA_TABLE_FP='{DATA_TABLE_FP}' - FIELD_TABLE_FP='{FIELD_TABLE_FP}' - FV3_NML_FN='{FV3_NML_FN}' # This may not be necessary... - FV3_NML_FP='{FV3_NML_FP}' - NEMS_CONFIG_FP='{NEMS_CONFIG_FP}' + 'DATA_TABLE_FP': DATA_TABLE_FP, + 'FIELD_TABLE_FP': FIELD_TABLE_FP, + 'FV3_NML_FN': FV3_NML_FN, # This may not be necessary... 
+ 'FV3_NML_FP': FV3_NML_FP, + 'NEMS_CONFIG_FP': NEMS_CONFIG_FP, - FV3_EXEC_FP='{FV3_EXEC_FP}' + 'FV3_EXEC_FP': FV3_EXEC_FP, - LOAD_MODULES_RUN_TASK_FP='{LOAD_MODULES_RUN_TASK_FP}' + 'LOAD_MODULES_RUN_TASK_FP': LOAD_MODULES_RUN_TASK_FP, - THOMPSON_MP_CLIMO_FN='{THOMPSON_MP_CLIMO_FN}' - THOMPSON_MP_CLIMO_FP='{THOMPSON_MP_CLIMO_FP}' + 'THOMPSON_MP_CLIMO_FN': THOMPSON_MP_CLIMO_FN, + 'THOMPSON_MP_CLIMO_FP': THOMPSON_MP_CLIMO_FP, # #----------------------------------------------------------------------- # @@ -1954,7 +1930,7 @@ def setup(): # #----------------------------------------------------------------------- # - RELATIVE_LINK_FLAG='{RELATIVE_LINK_FLAG}' + 'RELATIVE_LINK_FLAG': RELATIVE_LINK_FLAG, # #----------------------------------------------------------------------- # @@ -1963,8 +1939,8 @@ def setup(): # #----------------------------------------------------------------------- # - SDF_USES_RUC_LSM='{type_to_str(SDF_USES_RUC_LSM)}' - SDF_USES_THOMPSON_MP='{type_to_str(SDF_USES_THOMPSON_MP)}' + 'SDF_USES_RUC_LSM': SDF_USES_RUC_LSM, + 'SDF_USES_THOMPSON_MP': SDF_USES_THOMPSON_MP, # #----------------------------------------------------------------------- # @@ -1973,28 +1949,24 @@ def setup(): # #----------------------------------------------------------------------- # - GTYPE='{GTYPE}' - TILE_RGNL='{TILE_RGNL}' - NH0='{NH0}' - NH3='{NH3}' - NH4='{NH4}' + 'GTYPE': GTYPE, + 'TILE_RGNL': TILE_RGNL, - LON_CTR='{LON_CTR}' - LAT_CTR='{LAT_CTR}' - NX='{NX}' - NY='{NY}' - NHW='{NHW}' - STRETCH_FAC='{STRETCH_FAC}' + 'LON_CTR': LON_CTR, + 'LAT_CTR': LAT_CTR, + 'NX': NX, + 'NY': NY, + 'NHW': NHW, + 'STRETCH_FAC': STRETCH_FAC, - RES_IN_FIXLAM_FILENAMES='{RES_IN_FIXLAM_FILENAMES}' + 'RES_IN_FIXLAM_FILENAMES': RES_IN_FIXLAM_FILENAMES, # # If running the make_grid task, CRES will be set to a null string during # the grid generation step. It will later be set to an actual value after # the make_grid task is complete. 
# - CRES='{CRES}'""" - with open(GLOBAL_VAR_DEFNS_FP,'a') as f: - f.write(dedent(msg)) + 'CRES': CRES + } # #----------------------------------------------------------------------- # @@ -2004,8 +1976,6 @@ def setup(): #----------------------------------------------------------------------- # if GRID_GEN_METHOD == "GFDLgrid": - - msg=f""" # #----------------------------------------------------------------------- # @@ -2019,16 +1989,13 @@ def setup(): # #----------------------------------------------------------------------- # - ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='{ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' - IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='{IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' - JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='{JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' - JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='{JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}'""" - with open(GLOBAL_VAR_DEFNS_FP,'a') as f: - f.write(dedent(msg)) - + settings.update({ + 'ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG': ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG, + 'IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG': IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG, + 'JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG': JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG, + 'JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG': JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG + }) elif GRID_GEN_METHOD == "ESGgrid": - - msg=f""" # #----------------------------------------------------------------------- # @@ -2039,13 +2006,13 @@ def setup(): # #----------------------------------------------------------------------- # - DEL_ANGLE_X_SG='{DEL_ANGLE_X_SG}' - DEL_ANGLE_Y_SG='{DEL_ANGLE_Y_SG}' - NEG_NX_OF_DOM_WITH_WIDE_HALO='{NEG_NX_OF_DOM_WITH_WIDE_HALO}' - NEG_NY_OF_DOM_WITH_WIDE_HALO='{NEG_NY_OF_DOM_WITH_WIDE_HALO}' - PAZI='{PAZI or ''}'""" - with open(GLOBAL_VAR_DEFNS_FP,'a') as f: - f.write(dedent(msg)) + settings.update({ + 'DEL_ANGLE_X_SG': DEL_ANGLE_X_SG, + 'DEL_ANGLE_Y_SG': DEL_ANGLE_Y_SG, + 'NEG_NX_OF_DOM_WITH_WIDE_HALO': 
NEG_NX_OF_DOM_WITH_WIDE_HALO, + 'NEG_NY_OF_DOM_WITH_WIDE_HALO': NEG_NY_OF_DOM_WITH_WIDE_HALO, + 'PAZI': PAZI or '' + }) # #----------------------------------------------------------------------- # @@ -2054,7 +2021,7 @@ def setup(): # #----------------------------------------------------------------------- # - msg = f""" + settings.update({ # #----------------------------------------------------------------------- # @@ -2063,7 +2030,7 @@ def setup(): # #----------------------------------------------------------------------- # - CPL='{type_to_str(CPL)}' + 'CPL': CPL, # #----------------------------------------------------------------------- # @@ -2072,7 +2039,7 @@ def setup(): # #----------------------------------------------------------------------- # - OZONE_PARAM='{OZONE_PARAM}' + 'OZONE_PARAM': OZONE_PARAM, # #----------------------------------------------------------------------- # @@ -2084,7 +2051,7 @@ def setup(): # #----------------------------------------------------------------------- # - EXTRN_MDL_SYSBASEDIR_ICS='{EXTRN_MDL_SYSBASEDIR_ICS}' + 'EXTRN_MDL_SYSBASEDIR_ICS': EXTRN_MDL_SYSBASEDIR_ICS, # #----------------------------------------------------------------------- # @@ -2096,7 +2063,7 @@ def setup(): # #----------------------------------------------------------------------- # - EXTRN_MDL_SYSBASEDIR_LBCS='{EXTRN_MDL_SYSBASEDIR_LBCS}' + 'EXTRN_MDL_SYSBASEDIR_LBCS': EXTRN_MDL_SYSBASEDIR_LBCS, # #----------------------------------------------------------------------- # @@ -2105,7 +2072,7 @@ def setup(): # #----------------------------------------------------------------------- # - EXTRN_MDL_LBCS_OFFSET_HRS='{EXTRN_MDL_LBCS_OFFSET_HRS}' + 'EXTRN_MDL_LBCS_OFFSET_HRS': EXTRN_MDL_LBCS_OFFSET_HRS, # #----------------------------------------------------------------------- # @@ -2114,7 +2081,7 @@ def setup(): # #----------------------------------------------------------------------- # - LBC_SPEC_FCST_HRS={list_to_str(LBC_SPEC_FCST_HRS)} + 'LBC_SPEC_FCST_HRS': 
LBC_SPEC_FCST_HRS, # #----------------------------------------------------------------------- # @@ -2123,8 +2090,8 @@ def setup(): # #----------------------------------------------------------------------- # - NUM_CYCLES='{NUM_CYCLES}' - ALL_CDATES={list_to_str(ALL_CDATES)} + 'NUM_CYCLES': NUM_CYCLES, + 'ALL_CDATES': ALL_CDATES, # #----------------------------------------------------------------------- # @@ -2138,9 +2105,9 @@ def setup(): # #----------------------------------------------------------------------- # - USE_FVCOM='{type_to_str(USE_FVCOM)}' - FVCOM_DIR='{FVCOM_DIR}' - FVCOM_FILE='{FVCOM_FILE}' + 'USE_FVCOM': USE_FVCOM, + 'FVCOM_DIR': FVCOM_DIR, + 'FVCOM_FILE': FVCOM_FILE, # #----------------------------------------------------------------------- # @@ -2148,8 +2115,8 @@ def setup(): # #----------------------------------------------------------------------- # - NCORES_PER_NODE='{NCORES_PER_NODE}' - PE_MEMBER01='{PE_MEMBER01}' + 'NCORES_PER_NODE': NCORES_PER_NODE, + 'PE_MEMBER01': PE_MEMBER01, # #----------------------------------------------------------------------- # @@ -2162,17 +2129,24 @@ def setup(): # #----------------------------------------------------------------------- # - N_VAR_SPP='{N_VAR_SPP}' - N_VAR_LNDP='{N_VAR_LNDP}' - LNDP_TYPE='{LNDP_TYPE}' - LNDP_MODEL_TYPE='{LNDP_MODEL_TYPE}' - FHCYC_LSM_SPP_OR_NOT='{FHCYC_LSM_SPP_OR_NOT}' - """ + 'N_VAR_SPP': N_VAR_SPP, + 'N_VAR_LNDP': N_VAR_LNDP, + 'LNDP_TYPE': LNDP_TYPE, + 'LNDP_MODEL_TYPE': LNDP_MODEL_TYPE, + 'FHCYC_LSM_SPP_OR_NOT': FHCYC_LSM_SPP_OR_NOT + }) + # + #----------------------------------------------------------------------- + # + # Now write all settings we collected so far to var_defns file + # + #----------------------------------------------------------------------- + # with open(GLOBAL_VAR_DEFNS_FP,'a') as f: - f.write(dedent(msg)) + f.write(cfg_to_shell_str(settings)) - # export all vars + # export all global variables back to the environment export_vars() # diff --git
a/ush/setup.sh b/ush/setup.sh deleted file mode 100755 index 8ba9eab852..0000000000 --- a/ush/setup.sh +++ /dev/null @@ -1,2853 +0,0 @@ -#!/bin/bash -# -#----------------------------------------------------------------------- -# -# This file defines and then calls a function that sets a secondary set -# of parameters needed by the various scripts that are called by the -# FV3-LAM rocoto community workflow. This secondary set of parameters is -# calculated using the primary set of user-defined parameters in the de- -# fault and custom experiment/workflow configuration scripts (whose file -# names are defined below). This script then saves both sets of parame- -# ters in a global variable definitions file (really a bash script) in -# the experiment directory. This file then gets sourced by the various -# scripts called by the tasks in the workflow. -# -#----------------------------------------------------------------------- -# -function setup() { -# -#----------------------------------------------------------------------- -# -# Get the full path to the file in which this script/function is located -# (scrfunc_fp), the name of that file (scrfunc_fn), and the directory in -# which the file is located (scrfunc_dir). -# -#----------------------------------------------------------------------- -# -local scrfunc_fp=$( $READLINK -f "${BASH_SOURCE[0]}" ) -local scrfunc_fn=$( basename "${scrfunc_fp}" ) -local scrfunc_dir=$( dirname "${scrfunc_fp}" ) -# -#----------------------------------------------------------------------- -# -# Get the name of this function. -# -#----------------------------------------------------------------------- -# -local func_name="${FUNCNAME[0]}" -# -#----------------------------------------------------------------------- -# -# -# -#----------------------------------------------------------------------- -# -cd_vrfy ${scrfunc_dir} -# -#----------------------------------------------------------------------- -# -# Source bash utility functions. 
-# -#----------------------------------------------------------------------- -# - -. ./source_util_funcs.sh - -print_info_msg " -======================================================================== -Starting function ${func_name}() in \"${scrfunc_fn}\"... -========================================================================" -# -#----------------------------------------------------------------------- -# -# Source other necessary files. -# -#----------------------------------------------------------------------- -# -. ./check_expt_config_vars.sh -. ./set_cycle_dates.sh -. ./set_predef_grid_params.sh -. ./set_gridparams_GFDLgrid.sh -. ./set_gridparams_ESGgrid.sh -. ./link_fix.sh -. ./set_ozone_param.sh -. ./set_thompson_mp_fix_files.sh -. ./check_ruc_lsm.sh -# -#----------------------------------------------------------------------- -# -# Save current shell options (in a global array). Then set new options -# for this script/function. -# -#----------------------------------------------------------------------- -# -{ save_shell_opts; set -u +x; } > /dev/null 2>&1 -# -#----------------------------------------------------------------------- -# -# Set the name of the configuration file containing default values for -# the experiment/workflow variables. Then source the file. -# -#----------------------------------------------------------------------- -# -EXPT_DEFAULT_CONFIG_FN="config_defaults.sh" -. ./${EXPT_DEFAULT_CONFIG_FN} -# -#----------------------------------------------------------------------- -# -# If a user-specified configuration file exists, source it. This file -# contains user-specified values for a subset of the experiment/workflow -# variables that override their default values. Note that the user- -# specified configuration file is not tracked by the repository, whereas -# the default configuration file is tracked. 
-# -#----------------------------------------------------------------------- -# -if [ -f "${EXPT_CONFIG_FN}" ]; then -# -# We require that the variables being set in the user-specified configu- -# ration file have counterparts in the default configuration file. This -# is so that we do not introduce new variables in the user-specified -# configuration file without also officially introducing them in the de- -# fault configuration file. Thus, before sourcing the user-specified -# configuration file, we check that all variables in the user-specified -# configuration file are also assigned default values in the default -# configuration file. -# - check_expt_config_vars \ - default_config_fp="./${EXPT_DEFAULT_CONFIG_FN}" \ - config_fp="./${EXPT_CONFIG_FN}" -# -# Now source the user-specified configuration file. -# - . ./${EXPT_CONFIG_FN} -# -fi -# -#----------------------------------------------------------------------- -# -# Source the script defining the valid values of experiment variables. -# -#----------------------------------------------------------------------- -# -. ./valid_param_vals.sh -# -#----------------------------------------------------------------------- -# -# Make sure that user-defined variables are set to valid values -# -# Set binary switch variables to either "TRUE" or "FALSE" by calling -# boolify so we don't have to consider other valid values later on. 
-# -#----------------------------------------------------------------------- -# -check_var_valid_value "RUN_ENVIR" "valid_vals_RUN_ENVIR" - -check_var_valid_value "VERBOSE" "valid_vals_BOOLEAN" -VERBOSE=$(boolify "$VERBOSE") - -check_var_valid_value "DEBUG" "valid_vals_BOOLEAN" -DEBUG=$(boolify "$DEBUG") - -check_var_valid_value "USE_CRON_TO_RELAUNCH" "valid_vals_BOOLEAN" -USE_CRON_TO_RELAUNCH=$(boolify "${USE_CRON_TO_RELAUNCH}") -# -#----------------------------------------------------------------------- -# -# If DEBUG is set to "TRUE", set VERBOSE to "TRUE" to print out all -# of the VERBOSE output (in addition to any DEBUG output). -# -#----------------------------------------------------------------------- -# -if [ "$DEBUG" = "TRUE" ]; then - print_info_msg " -Setting VERBOSE to \"TRUE\" because DEBUG has been set to \"TRUE\"..." - VERBOSE="TRUE" -fi -# -#----------------------------------------------------------------------- -# -# Check flags that turn on/off various workflow tasks. 
-# -#----------------------------------------------------------------------- -# -check_var_valid_value "RUN_TASK_MAKE_GRID" "valid_vals_BOOLEAN" -RUN_TASK_MAKE_GRID=$(boolify "${RUN_TASK_MAKE_GRID}") - -check_var_valid_value "RUN_TASK_MAKE_OROG" "valid_vals_BOOLEAN" -RUN_TASK_MAKE_OROG=$(boolify "${RUN_TASK_MAKE_OROG}") - -check_var_valid_value "RUN_TASK_MAKE_SFC_CLIMO" "valid_vals_BOOLEAN" -RUN_TASK_MAKE_SFC_CLIMO=$(boolify "${RUN_TASK_MAKE_SFC_CLIMO}") - -check_var_valid_value "RUN_TASK_GET_EXTRN_ICS" "valid_vals_BOOLEAN" -RUN_TASK_GET_EXTRN_ICS=$(boolify "${RUN_TASK_GET_EXTRN_ICS}") - -check_var_valid_value "RUN_TASK_GET_EXTRN_LBCS" "valid_vals_BOOLEAN" -RUN_TASK_GET_EXTRN_LBCS=$(boolify "${RUN_TASK_GET_EXTRN_LBCS}") - -check_var_valid_value "RUN_TASK_RUN_FCST" "valid_vals_BOOLEAN" -RUN_TASK_RUN_FCST=$(boolify "${RUN_TASK_RUN_FCST}") - -check_var_valid_value "RUN_TASK_RUN_POST" "valid_vals_BOOLEAN" -RUN_TASK_RUN_POST=$(boolify "${RUN_TASK_RUN_POST}") - -check_var_valid_value "RUN_TASK_GET_OBS_CCPA" "valid_vals_BOOLEAN" -RUN_TASK_GET_OBS_CCPA=$(boolify "${RUN_TASK_GET_OBS_CCPA}") - -check_var_valid_value "RUN_TASK_GET_OBS_MRMS" "valid_vals_BOOLEAN" -RUN_TASK_GET_OBS_MRMS=$(boolify "${RUN_TASK_GET_OBS_MRMS}") - -check_var_valid_value "RUN_TASK_GET_OBS_NDAS" "valid_vals_BOOLEAN" -RUN_TASK_GET_OBS_NDAS=$(boolify "${RUN_TASK_GET_OBS_NDAS}") - -check_var_valid_value "RUN_TASK_VX_GRIDSTAT" "valid_vals_BOOLEAN" -RUN_TASK_VX_GRIDSTAT=$(boolify "${RUN_TASK_VX_GRIDSTAT}") - -check_var_valid_value "RUN_TASK_VX_POINTSTAT" "valid_vals_BOOLEAN" -RUN_TASK_VX_POINTSTAT=$(boolify "${RUN_TASK_VX_POINTSTAT}") - -check_var_valid_value "RUN_TASK_VX_ENSGRID" "valid_vals_BOOLEAN" -RUN_TASK_VX_ENSGRID=$(boolify "${RUN_TASK_VX_ENSGRID}") - -check_var_valid_value "RUN_TASK_VX_ENSPOINT" "valid_vals_BOOLEAN" -RUN_TASK_VX_ENSPOINT=$(boolify "${RUN_TASK_VX_ENSPOINT}") -# -#----------------------------------------------------------------------- -# -# Check stochastic physcs flags. 
-# -#----------------------------------------------------------------------- -# -check_var_valid_value "DO_SHUM" "valid_vals_BOOLEAN" -DO_SHUM=$(boolify "${DO_SHUM}") - -check_var_valid_value "DO_SPPT" "valid_vals_BOOLEAN" -DO_SPPT=$(boolify "${DO_SPPT}") - -check_var_valid_value "DO_SKEB" "valid_vals_BOOLEAN" -DO_SKEB=$(boolify "${DO_SKEB}") - -check_var_valid_value "DO_SPP" "valid_vals_BOOLEAN" -DO_SPP=$(boolify "${DO_SPP}") - -check_var_valid_value "DO_LSM_SPP" "valid_vals_BOOLEAN" -DO_LSM_SPP=$(boolify "${DO_LSM_SPP}") - -check_var_valid_value "USE_ZMTNBLCK" "valid_vals_BOOLEAN" -USE_ZMTNBLCK=$(boolify "${USE_ZMTNBLCK}") -# -#----------------------------------------------------------------------- -# -# Set magnitude of stochastic ad-hoc schemes to -999.0 if they are not -# being used. This is required at the moment, since "do_shum/sppt/skeb" -# does not override the use of the scheme unless the magnitude is also -# specifically set to -999.0. If all "do_shum/sppt/skeb" are set to -# "false," then none will run, regardless of the magnitude values. -# -#----------------------------------------------------------------------- -# -if [ "${DO_SHUM}" = "FALSE" ]; then - SHUM_MAG=-999.0 -fi -if [ "${DO_SKEB}" = "FALSE" ]; then - SKEB_MAG=-999.0 -fi -if [ "${DO_SPPT}" = "FALSE" ]; then - SPPT_MAG=-999.0 -fi -# -#----------------------------------------------------------------------- -# -# If running with SPP in MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or -# RRTMG, count the number of entries in SPP_VAR_LIST to correctly set -# N_VAR_SPP, otherwise set it to zero. -# -#----------------------------------------------------------------------- -# -N_VAR_SPP=0 -if [ "${DO_SPP}" = "TRUE" ]; then - N_VAR_SPP=${#SPP_VAR_LIST[@]} -fi -# -#----------------------------------------------------------------------- -# -# If running with Noah or RUC-LSM SPP, count the number of entries in -# LSM_SPP_VAR_LIST to correctly set N_VAR_LNDP, otherwise set it to zero. 
-# Also set LNDP_TYPE to 2 for LSM SPP, otherwise set it to zero. Finally, -# initialize an "FHCYC_LSM_SPP" variable to 0 and set it to 999 if LSM SPP -# is turned on. This requirement is necessary since LSM SPP cannot run with -# FHCYC=0 at the moment, but FHCYC cannot be set to anything less than the -# length of the forecast either. A bug fix will be submitted to -# ufs-weather-model soon, at which point, this requirement can be removed -# from regional_workflow. -# -#----------------------------------------------------------------------- -# -N_VAR_LNDP=0 -LNDP_TYPE=0 -LNDP_MODEL_TYPE=0 -FHCYC_LSM_SPP_OR_NOT=0 -if [ "${DO_LSM_SPP}" = "TRUE" ]; then - N_VAR_LNDP=${#LSM_SPP_VAR_LIST[@]} - LNDP_TYPE=2 - LNDP_MODEL_TYPE=2 - FHCYC_LSM_SPP_OR_NOT=999 -fi -# -#----------------------------------------------------------------------- -# -# If running with SPP, confirm that each SPP-related namelist value -# contains the same number of entries as N_VAR_SPP (set above to be equal -# to the number of entries in SPP_VAR_LIST). 
-# -#----------------------------------------------------------------------- -# -if [ "${DO_SPP}" = "TRUE" ]; then - if [ "${#SPP_MAG_LIST[@]}" != "${N_VAR_SPP}" ] || \ - [ "${#SPP_LSCALE[@]}" != "${N_VAR_SPP}" ] || \ - [ "${#SPP_TSCALE[@]}" != "${N_VAR_SPP}" ] || \ - [ "${#SPP_SIGTOP1[@]}" != "${N_VAR_SPP}" ] || \ - [ "${#SPP_SIGTOP2[@]}" != "${N_VAR_SPP}" ] || \ - [ "${#SPP_STDDEV_CUTOFF[@]}" != "${N_VAR_SPP}" ] || \ - [ "${#ISEED_SPP[@]}" != "${N_VAR_SPP}" ]; then - print_err_msg_exit "\ -All MYNN PBL, MYNN SFC, GSL GWD, Thompson MP, or RRTMG SPP-related namelist -variables set in ${CONFIG_FN} must be equal in number of entries to what is -found in SPP_VAR_LIST: - Number of entries in SPP_VAR_LIST = \"${#SPP_VAR_LIST[@]}\"" - fi -fi -# -#----------------------------------------------------------------------- -# -# If running with LSM SPP, confirm that each LSM SPP-related namelist -# value contains the same number of entries as N_VAR_LNDP (set above to -# be equal to the number of entries in LSM_SPP_VAR_LIST). -# -#----------------------------------------------------------------------- -# -if [ "${DO_LSM_SPP}" = "TRUE" ]; then - if [ "${#LSM_SPP_MAG_LIST[@]}" != "${N_VAR_LNDP}" ] || \ - [ "${#LSM_SPP_LSCALE[@]}" != "${N_VAR_LNDP}" ] || \ - [ "${#LSM_SPP_TSCALE[@]}" != "${N_VAR_LNDP}" ]; then - print_err_msg_exit "\ -All Noah or RUC-LSM SPP-related namelist variables (except ISEED_LSM_SPP) -set in ${CONFIG_FN} must be equal in number of entries to what is found in -SPP_VAR_LIST: - Number of entries in SPP_VAR_LIST = \"${#LSM_SPP_VAR_LIST[@]}\"" - fi -fi -# -#----------------------------------------------------------------------- -# -# Make sure that DOT_OR_USCORE is set to a valid value. 
-# -#----------------------------------------------------------------------- -# -check_var_valid_value "DOT_OR_USCORE" "valid_vals_DOT_OR_USCORE" -# -#----------------------------------------------------------------------- -# -# Make sure that USE_FVCOM is set to a valid value and assign directory -# and file names. -# -# Make sure that FVCOM_WCSTART is set to lowercase "warm" or "cold" -# -#----------------------------------------------------------------------- -# -check_var_valid_value "USE_FVCOM" "valid_vals_BOOLEAN" -USE_FVCOM=$(boolify "${USE_FVCOM}") - -check_var_valid_value "FVCOM_WCSTART" "valid_vals_FVCOM_WCSTART" -FVCOM_WCSTART=$(echo_lowercase $FVCOM_WCSTART) -# -#----------------------------------------------------------------------- -# -# Set various directories. -# -# HOMErrfs: -# Top directory of the clone of the FV3-LAM workflow git repository. -# -# USHDIR: -# Directory containing the shell scripts called by the workflow. -# -# SCRIPTSDIR: -# Directory containing the ex scripts called by the workflow. -# -# JOBSSDIR: -# Directory containing the jjobs scripts called by the workflow. -# -# SORCDIR: -# Directory containing various source codes. -# -# PARMDIR: -# Directory containing parameter files, template files, etc. -# -# EXECDIR: -# Directory containing various executable files. -# -# TEMPLATE_DIR: -# Directory in which templates of various FV3-LAM input files are locat- -# ed. -# -# UFS_WTHR_MDL_DIR: -# Directory in which the (NEMS-enabled) FV3-LAM application is located. -# This directory includes subdirectories for FV3, NEMS, and FMS. -# -#----------------------------------------------------------------------- -# - -# -# The current script should be located in the ush subdirectory of the -# workflow directory. Thus, the workflow directory is the one above the -# directory of the current script. 
-# -SR_WX_APP_TOP_DIR=${scrfunc_dir%/*/*} - -# -#----------------------------------------------------------------------- -# -# Set the base directories in which codes obtained from external reposi- -# tories (using the manage_externals tool) are placed. Obtain the rela- -# tive paths to these directories by reading them in from the manage_ex- -# ternals configuration file. (Note that these are relative to the lo- -# cation of the configuration file.) Then form the full paths to these -# directories. Finally, make sure that each of these directories actu- -# ally exists. -# -#----------------------------------------------------------------------- -# -mng_extrns_cfg_fn=$( $READLINK -f "${SR_WX_APP_TOP_DIR}/Externals.cfg" ) -property_name="local_path" -# -# Get the path to the workflow scripts -# -external_name=regional_workflow -HOMErrfs=$( \ -get_manage_externals_config_property \ -"${mng_extrns_cfg_fn}" "${external_name}" "${property_name}" ) || \ -print_err_msg_exit "\ -Call to function get_manage_externals_config_property failed." -HOMErrfs="${SR_WX_APP_TOP_DIR}/${HOMErrfs}" -set +x -# -# Get the base directory of the FV3 forecast model code. -# -external_name="${FCST_MODEL}" -UFS_WTHR_MDL_DIR=$( \ -get_manage_externals_config_property \ -"${mng_extrns_cfg_fn}" "${external_name}" "${property_name}" ) || \ -print_err_msg_exit "\ -Call to function get_manage_externals_config_property failed." - -UFS_WTHR_MDL_DIR="${SR_WX_APP_TOP_DIR}/${UFS_WTHR_MDL_DIR}" -if [ ! -d "${UFS_WTHR_MDL_DIR}" ]; then - print_err_msg_exit "\ -The base directory in which the FV3 source code should be located -(UFS_WTHR_MDL_DIR) does not exist: - UFS_WTHR_MDL_DIR = \"${UFS_WTHR_MDL_DIR}\" -Please clone the external repository containing the code in this directory, -build the executable, and then rerun the workflow." -fi -# -# Get the base directory of the UFS_UTILS codes. 
-# -external_name="ufs_utils" -UFS_UTILS_DIR=$( \ -get_manage_externals_config_property \ -"${mng_extrns_cfg_fn}" "${external_name}" "${property_name}" ) || \ -print_err_msg_exit "\ -Call to function get_manage_externals_config_property failed." - -UFS_UTILS_DIR="${SR_WX_APP_TOP_DIR}/${UFS_UTILS_DIR}" -if [ ! -d "${UFS_UTILS_DIR}" ]; then - print_err_msg_exit "\ -The base directory in which the UFS utilities source codes should be lo- -cated (UFS_UTILS_DIR) does not exist: - UFS_UTILS_DIR = \"${UFS_UTILS_DIR}\" -Please clone the external repository containing the code in this direct- -ory, build the executables, and then rerun the workflow." -fi -# -# Get the base directory of the UPP code. -# -external_name="UPP" -UPP_DIR=$( \ -get_manage_externals_config_property \ -"${mng_extrns_cfg_fn}" "${external_name}" "${property_name}" ) || \ -print_err_msg_exit "\ -Call to function get_manage_externals_config_property failed." - -UPP_DIR="${SR_WX_APP_TOP_DIR}/${UPP_DIR}" -if [ ! -d "${UPP_DIR}" ]; then - print_err_msg_exit "\ -The base directory in which the UPP source code should be located -(UPP_DIR) does not exist: - UPP_DIR = \"${UPP_DIR}\" -Please clone the external repository containing the code in this directory, -build the executable, and then rerun the workflow." -fi -# -# Define some other useful paths -# -USHDIR="$HOMErrfs/ush" -SCRIPTSDIR="$HOMErrfs/scripts" -JOBSDIR="$HOMErrfs/jobs" -SORCDIR="$HOMErrfs/sorc" -SRC_DIR="${SR_WX_APP_TOP_DIR}/src" -PARMDIR="$HOMErrfs/parm" -MODULES_DIR="$HOMErrfs/modulefiles" -EXECDIR="${SR_WX_APP_TOP_DIR}/${EXEC_SUBDIR}" -TEMPLATE_DIR="$USHDIR/templates" -VX_CONFIG_DIR="$TEMPLATE_DIR/parm" -METPLUS_CONF="$TEMPLATE_DIR/parm/metplus" -MET_CONFIG="$TEMPLATE_DIR/parm/met" -# -#----------------------------------------------------------------------- -# -# Convert machine name to upper case if necessary. Then make sure that -# MACHINE is set to a valid value. 
-# -#----------------------------------------------------------------------- -# -MACHINE=$( printf "%s" "$MACHINE" | $SED -e 's/\(.*\)/\U\1/' ) -check_var_valid_value "MACHINE" "valid_vals_MACHINE" -# -#----------------------------------------------------------------------- -# -# Source the machine config file containing architechture information, -# queue names, and supported input file paths. -# -#----------------------------------------------------------------------- -# -RELATIVE_LINK_FLAG="--relative" -MACHINE_FILE=${MACHINE_FILE:-${USHDIR}/machine/$(echo_lowercase $MACHINE).sh} -source $USHDIR/source_machine_file.sh - -if [ -z "${NCORES_PER_NODE:-}" ]; then - print_err_msg_exit "\ - NCORES_PER_NODE has not been specified in the file ${MACHINE_FILE} - Please ensure this value has been set for your desired platform. " -fi - -if [ -z "$FIXgsm" -o -z "$FIXaer" -o -z "$FIXlut" -o -z "$TOPO_DIR" -o -z "$SFC_CLIMO_INPUT_DIR" ]; then - print_err_msg_exit "\ -One or more fix file directories have not been specified for this machine: - MACHINE = \"$MACHINE\" - FIXgsm = \"${FIXgsm:-\"\"} - FIXaer = \"${FIXaer:-\"\"} - FIXlut = \"${FIXlut:-\"\"} - TOPO_DIR = \"${TOPO_DIR:-\"\"} - SFC_CLIMO_INPUT_DIR = \"${SFC_CLIMO_INPUT_DIR:-\"\"} - DOMAIN_PREGEN_BASEDIR = \"${DOMAIN_PREGEN_BASEDIR:-\"\"} -You can specify the missing location(s) in ${machine_file}" -fi -# -#----------------------------------------------------------------------- -# -# Make sure COMPILER is set to a valid value. -# -#----------------------------------------------------------------------- -# -COMPILER=$(echo_lowercase $COMPILER) -check_var_valid_value "COMPILER" "valid_vals_COMPILER" -# -#----------------------------------------------------------------------- -# -# Set the names of the build and workflow module files (if not already -# specified by the user). These are the files that need to be loaded -# before building the component SRW App codes and running various workflow -# scripts, respectively. 
-# -#----------------------------------------------------------------------- -# -machine=$(echo_lowercase ${MACHINE}) -WFLOW_MOD_FN=${WFLOW_MOD_FN:-"wflow_${machine}"} -BUILD_MOD_FN=${BUILD_MOD_FN:-"build_${machine}_${COMPILER}"} -# -#----------------------------------------------------------------------- -# -# Calculate a default value for the number of processes per node for the -# RUN_FCST_TN task. Then set PPN_RUN_FCST to this default value if -# PPN_RUN_FCST is not already specified by the user. -# -#----------------------------------------------------------------------- -# -ppn_run_fcst_default="$(( ${NCORES_PER_NODE} / ${OMP_NUM_THREADS_RUN_FCST} ))" -PPN_RUN_FCST=${PPN_RUN_FCST:-${ppn_run_fcst_default}} -# -#----------------------------------------------------------------------- -# -# Make sure SCHED is set to a valid value. -# -#----------------------------------------------------------------------- -# -SCHED=$(echo_lowercase $SCHED) -check_var_valid_value "SCHED" "valid_vals_SCHED" -# -#----------------------------------------------------------------------- -# -# If we are using a workflow manager check that the ACCOUNT variable is -# not empty. -# -#----------------------------------------------------------------------- -# -if [ "$WORKFLOW_MANAGER" != "none" ]; then - if [ -z "$ACCOUNT" ]; then - print_err_msg_exit "\ -The variable ACCOUNT cannot be empty if you are using a workflow manager: - ACCOUNT = \"$ACCOUNT\" - WORKFLOW_MANAGER = \"$WORKFLOW_MANAGER\"" - fi -fi -# -#----------------------------------------------------------------------- -# -# Set the grid type (GTYPE). In general, in the FV3 code, this can take -# on one of the following values: "global", "stretch", "nest", and "re- -# gional". The first three values are for various configurations of a -# global grid, while the last one is for a regional grid. Since here we -# are only interested in a regional grid, GTYPE must be set to "region- -# al". 
-# -#----------------------------------------------------------------------- -# -GTYPE="regional" -TILE_RGNL="7" -# -#----------------------------------------------------------------------- -# -# Make sure that GTYPE is set to a valid value. -# -#----------------------------------------------------------------------- -# -check_var_valid_value "GTYPE" "valid_vals_GTYPE" -# -#----------------------------------------------------------------------- -# -# Make sure PREDEF_GRID_NAME is set to a valid value. -# -#----------------------------------------------------------------------- -# -if [ ! -z ${PREDEF_GRID_NAME} ]; then - err_msg="\ -The predefined regional grid specified in PREDEF_GRID_NAME is not sup- -ported: - PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\"" - check_var_valid_value \ - "PREDEF_GRID_NAME" "valid_vals_PREDEF_GRID_NAME" "${err_msg}" -fi -# -#----------------------------------------------------------------------- -# -# Make sure that PREEXISTING_DIR_METHOD is set to a valid value. -# -#----------------------------------------------------------------------- -# -check_var_valid_value \ - "PREEXISTING_DIR_METHOD" "valid_vals_PREEXISTING_DIR_METHOD" -# -#----------------------------------------------------------------------- -# -# Make sure CCPP_PHYS_SUITE is set to a valid value. -# -#----------------------------------------------------------------------- -# -err_msg="\ -The CCPP physics suite specified in CCPP_PHYS_SUITE is not supported: - CCPP_PHYS_SUITE = \"${CCPP_PHYS_SUITE}\"" -check_var_valid_value \ - "CCPP_PHYS_SUITE" "valid_vals_CCPP_PHYS_SUITE" "${err_msg}" -# -#----------------------------------------------------------------------- -# -# Make sure that USE_MERRA_CLIMO is set to a valid value. 
-# -#----------------------------------------------------------------------- -# -check_var_valid_value "USE_MERRA_CLIMO" "valid_vals_BOOLEAN" -USE_MERRA_CLIMO=$(boolify "${USE_MERRA_CLIMO}") -# Force to "TRUE" in case of FV3_GFS_v15_thompson_mynn_lam3km: -if [ "${CCPP_PHYS_SUITE}" = "FV3_GFS_v15_thompson_mynn_lam3km" ]; then - USE_MERRA_CLIMO="TRUE" -fi -# -#----------------------------------------------------------------------- -# -# Make sure that FCST_MODEL is set to a valid value. -# -#----------------------------------------------------------------------- -# -check_var_valid_value "FCST_MODEL" "valid_vals_FCST_MODEL" -# -#----------------------------------------------------------------------- -# -# Set CPL to TRUE/FALSE based on FCST_MODEL. -# -#----------------------------------------------------------------------- -# -if [ "${FCST_MODEL}" = "ufs-weather-model" ]; then - CPL="FALSE" -elif [ "${FCST_MODEL}" = "fv3gfs_aqm" ]; then - CPL="TRUE" -else - print_err_msg_exit "\ -The coupling flag CPL has not been specified for this value of FCST_MODEL: - FCST_MODEL = \"${FCST_MODEL}\"" -fi -# -#----------------------------------------------------------------------- -# -# Make sure RESTART_INTERVAL is set to an integer value if present -# -#----------------------------------------------------------------------- -# -if ! [[ "${RESTART_INTERVAL}" =~ ^[0-9]+$ ]]; then - print_err_msg_exit "\ -RESTART_INTERVAL must be set to an integer number of hours. - RESTART_INTERVAL = \"${RESTART_INTERVAL}\"" -fi -# -#----------------------------------------------------------------------- -# -# Check that DATE_FIRST_CYCL and DATE_LAST_CYCL are strings consisting -# of exactly 8 digits. 
-# -#----------------------------------------------------------------------- -# -date_or_null=$( printf "%s" "${DATE_FIRST_CYCL}" | \ - $SED -n -r -e "s/^([0-9]{8})$/\1/p" ) -if [ -z "${date_or_null}" ]; then - print_err_msg_exit "\ -DATE_FIRST_CYCL must be a string consisting of exactly 8 digits of the -form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit -month, and DD is the 2-digit day-of-month. - DATE_FIRST_CYCL = \"${DATE_FIRST_CYCL}\"" -fi - -date_or_null=$( printf "%s" "${DATE_LAST_CYCL}" | \ - $SED -n -r -e "s/^([0-9]{8})$/\1/p" ) -if [ -z "${date_or_null}" ]; then - print_err_msg_exit "\ -DATE_LAST_CYCL must be a string consisting of exactly 8 digits of the -form \"YYYYMMDD\", where YYYY is the 4-digit year, MM is the 2-digit -month, and DD is the 2-digit day-of-month. - DATE_LAST_CYCL = \"${DATE_LAST_CYCL}\"" -fi -# -#----------------------------------------------------------------------- -# -# Check that all elements of CYCL_HRS are strings consisting of exactly -# 2 digits that are between "00" and "23", inclusive. -# -#----------------------------------------------------------------------- -# -cycl_hrs_str=$(printf "\"%s\" " "${CYCL_HRS[@]}") -cycl_hrs_str="( ${cycl_hrs_str})" - -i=0 -for cycl_hr in "${CYCL_HRS[@]}"; do - - cycl_hr_or_null=$( printf "%s" "${cycl_hr}" | $SED -n -r -e "s/^([0-9]{2})$/\1/p" ) - - if [ -z "${cycl_hr_or_null}" ]; then - print_err_msg_exit "\ -Each element of CYCL_HRS must be a string consisting of exactly 2 digits -(including a leading \"0\", if necessary) specifying an hour-of-day. -Element #$i of CYCL_HRS (where the index of the first element is 0) does -not have this form: - CYCL_HRS = ${cycl_hrs_str} - CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" - fi - - if [ "${cycl_hr_or_null}" -lt "0" ] || \ - [ "${cycl_hr_or_null}" -gt "23" ]; then - print_err_msg_exit "\ -Each element of CYCL_HRS must be an integer between \"00\" and \"23\", -inclusive (including a leading \"0\", if necessary), specifying an hour- -of-day. 
Element #$i of CYCL_HRS (where the index of the first element -is 0) does not have this form: - CYCL_HRS = ${cycl_hrs_str} - CYCL_HRS[$i] = \"${CYCL_HRS[$i]}\"" - fi - - i=$(( $i+1 )) - -done -# -#----------------------------------------------------------------------- -# Check cycle increment for cycle frequency (cycl_freq). -# only if INCR_CYCL_FREQ < 24. -#----------------------------------------------------------------------- -# -if [ "${INCR_CYCL_FREQ}" -lt "24" ] && [ "$i" -gt "1" ]; then - cycl_intv="$(( 24/$i ))" - cycl_intv=( $( printf "%02d " "${cycl_intv}" ) ) - INCR_CYCL_FREQ=( $( printf "%02d " "${INCR_CYCL_FREQ}" ) ) - if [ "${cycl_intv}" -ne "${INCR_CYCL_FREQ}" ]; then - print_err_msg_exit "\ -The number of CYCL_HRS does not match with that expected by INCR_CYCL_FREQ: - INCR_CYCL_FREQ = ${INCR_CYCL_FREQ} - cycle interval by the number of CYCL_HRS = ${cycl_intv} - CYCL_HRS = ${cycl_hrs_str}" - fi - - im1=$(( $i-1 )) - for itmp in $( seq 1 ${im1} ); do - itm1=$(( ${itmp}-1 )) - cycl_next_itmp="$(( ${CYCL_HRS[itm1]} + ${INCR_CYCL_FREQ} ))" - cycl_next_itmp=( $( printf "%02d " "${cycl_next_itmp}" ) ) - if [ "${cycl_next_itmp}" -ne "${CYCL_HRS[$itmp]}" ]; then - print_err_msg_exit "\ -Element #${itmp} of CYCL_HRS does not match with the increment of cycle -frequency INCR_CYCL_FREQ: - CYCL_HRS = ${cycl_hrs_str} - INCR_CYCL_FREQ = ${INCR_CYCL_FREQ} - CYCL_HRS[$itmp] = \"${CYCL_HRS[$itmp]}\"" - fi - done -fi -# -#----------------------------------------------------------------------- -# -# Call a function to generate the array ALL_CDATES containing the cycle -# dates/hours for which to run forecasts. The elements of this array -# will have the form YYYYMMDDHH. They are the starting dates/times of -# the forecasts that will be run in the experiment. Then set NUM_CYCLES -# to the number of elements in this array. 
-# -#----------------------------------------------------------------------- -# -set_cycle_dates \ - date_start="${DATE_FIRST_CYCL}" \ - date_end="${DATE_LAST_CYCL}" \ - cycle_hrs="${cycl_hrs_str}" \ - incr_cycl_freq="${INCR_CYCL_FREQ}" \ - output_varname_all_cdates="ALL_CDATES" - -NUM_CYCLES="${#ALL_CDATES[@]}" - -if [ $NUM_CYCLES -gt 90 ] ; then - unset ALL_CDATES - print_info_msg "$VERBOSE" " -Too many cycles in ALL_CDATES to list, redefining in abbreviated form." -ALL_CDATES="${DATE_FIRST_CYCL}${CYCL_HRS[0]}...${DATE_LAST_CYCL}${CYCL_HRS[-1]}" -fi - -# -#----------------------------------------------------------------------- -# -# If using a custom post configuration file, make sure that it exists. -# -#----------------------------------------------------------------------- -# -check_var_valid_value "USE_CUSTOM_POST_CONFIG_FILE" "valid_vals_BOOLEAN" -USE_CUSTOM_POST_CONFIG_FILE=$(boolify "${USE_CUSTOM_POST_CONFIG_FILE}") - -if [ ${USE_CUSTOM_POST_CONFIG_FILE} = "TRUE" ]; then - if [ ! -f "${CUSTOM_POST_CONFIG_FP}" ]; then - print_err_msg_exit " -The custom post configuration specified by CUSTOM_POST_CONFIG_FP does not -exist: - CUSTOM_POST_CONFIG_FP = \"${CUSTOM_POST_CONFIG_FP}\"" - fi -fi -# -#----------------------------------------------------------------------- -# -# Ensure that USE_CRTM is set to a valid value. Then, if it is set to -# "TRUE" (i.e. if using external CRTM fix files to allow post-processing -# of synthetic satellite products from the UPP, make sure that the fix -# file directory exists. -# -#----------------------------------------------------------------------- -# -check_var_valid_value "USE_CRTM" "valid_vals_BOOLEAN" -USE_CRTM=$(boolify "${USE_CRTM}") - -if [ ${USE_CRTM} = "TRUE" ]; then - if [ ! 
-d "${CRTM_DIR}" ]; then - print_err_msg_exit " -The external CRTM fix file directory specified by CRTM_DIR does not exist: - CRTM_DIR = \"${CRTM_DIR}\"" - fi -fi -# -#----------------------------------------------------------------------- -# -# The forecast length (in integer hours) cannot contain more than 3 cha- -# racters. Thus, its maximum value is 999. Check whether the specified -# forecast length exceeds this maximum value. If so, print out a warn- -# ing and exit this script. -# -#----------------------------------------------------------------------- -# -fcst_len_hrs_max="999" -if [ "${FCST_LEN_HRS}" -gt "${fcst_len_hrs_max}" ]; then - print_err_msg_exit "\ -Forecast length is greater than maximum allowed length: - FCST_LEN_HRS = ${FCST_LEN_HRS} - fcst_len_hrs_max = ${fcst_len_hrs_max}" -fi -# -#----------------------------------------------------------------------- -# -# Check whether the forecast length (FCST_LEN_HRS) is evenly divisible -# by the BC update interval (LBC_SPEC_INTVL_HRS). If not, print out a -# warning and exit this script. If so, generate an array of forecast -# hours at which the boundary values will be updated. -# -#----------------------------------------------------------------------- -# -rem=$(( ${FCST_LEN_HRS}%${LBC_SPEC_INTVL_HRS} )) - -if [ "$rem" -ne "0" ]; then - print_err_msg_exit "\ -The forecast length (FCST_LEN_HRS) is not evenly divisible by the lateral -boundary conditions update interval (LBC_SPEC_INTVL_HRS): - FCST_LEN_HRS = ${FCST_LEN_HRS} - LBC_SPEC_INTVL_HRS = ${LBC_SPEC_INTVL_HRS} - rem = FCST_LEN_HRS%%LBC_SPEC_INTVL_HRS = $rem" -fi -# -#----------------------------------------------------------------------- -# -# Set the array containing the forecast hours at which the lateral -# boundary conditions (LBCs) need to be updated. Note that this array -# does not include the 0-th hour (initial time). 
-# -#----------------------------------------------------------------------- -# -LBC_SPEC_FCST_HRS=($( seq ${LBC_SPEC_INTVL_HRS} ${LBC_SPEC_INTVL_HRS} \ - ${FCST_LEN_HRS} )) -# -#----------------------------------------------------------------------- -# -# If PREDEF_GRID_NAME is set to a non-empty string, set or reset native -# and write-component grid parameters according to the specified predefined -# domain. -# -#----------------------------------------------------------------------- -# -if [ ! -z "${PREDEF_GRID_NAME}" ]; then - - set_predef_grid_params \ - predef_grid_name="${PREDEF_GRID_NAME}" \ - dt_atmos="${DT_ATMOS}" \ - layout_x="${LAYOUT_X}" \ - layout_y="${LAYOUT_Y}" \ - blocksize="${BLOCKSIZE}" \ - quilting="${QUILTING}" \ - outvarname_grid_gen_method="GRID_GEN_METHOD" \ - outvarname_esggrid_lon_ctr="ESGgrid_LON_CTR" \ - outvarname_esggrid_lat_ctr="ESGgrid_LAT_CTR" \ - outvarname_esggrid_delx="ESGgrid_DELX" \ - outvarname_esggrid_dely="ESGgrid_DELY" \ - outvarname_esggrid_nx="ESGgrid_NX" \ - outvarname_esggrid_ny="ESGgrid_NY" \ - outvarname_esggrid_pazi="ESGgrid_PAZI" \ - outvarname_esggrid_wide_halo_width="ESGgrid_WIDE_HALO_WIDTH" \ - outvarname_gfdlgrid_lon_t6_ctr="GFDLgrid_LON_T6_CTR" \ - outvarname_gfdlgrid_lat_t6_ctr="GFDLgrid_LAT_T6_CTR" \ - outvarname_gfdlgrid_stretch_fac="GFDLgrid_STRETCH_FAC" \ - outvarname_gfdlgrid_num_cells="GFDLgrid_NUM_CELLS" \ - outvarname_gfdlgrid_refine_ratio="GFDLgrid_REFINE_RATIO" \ - outvarname_gfdlgrid_istart_of_rgnl_dom_on_t6g="GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G" \ - outvarname_gfdlgrid_iend_of_rgnl_dom_on_t6g="GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G" \ - outvarname_gfdlgrid_jstart_of_rgnl_dom_on_t6g="GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G" \ - outvarname_gfdlgrid_jend_of_rgnl_dom_on_t6g="GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G" \ - outvarname_gfdlgrid_use_num_cells_in_filenames="GFDLgrid_USE_NUM_CELLS_IN_FILENAMES" \ - outvarname_dt_atmos="DT_ATMOS" \ - outvarname_layout_x="LAYOUT_X" \ - outvarname_layout_y="LAYOUT_Y" \ - 
outvarname_blocksize="BLOCKSIZE" \ - outvarname_wrtcmp_write_groups="WRTCMP_write_groups" \ - outvarname_wrtcmp_write_tasks_per_group="WRTCMP_write_tasks_per_group" \ - outvarname_wrtcmp_output_grid="WRTCMP_output_grid" \ - outvarname_wrtcmp_cen_lon="WRTCMP_cen_lon" \ - outvarname_wrtcmp_cen_lat="WRTCMP_cen_lat" \ - outvarname_wrtcmp_stdlat1="WRTCMP_stdlat1" \ - outvarname_wrtcmp_stdlat2="WRTCMP_stdlat2" \ - outvarname_wrtcmp_nx="WRTCMP_nx" \ - outvarname_wrtcmp_ny="WRTCMP_ny" \ - outvarname_wrtcmp_lon_lwr_left="WRTCMP_lon_lwr_left" \ - outvarname_wrtcmp_lat_lwr_left="WRTCMP_lat_lwr_left" \ - outvarname_wrtcmp_lon_upr_rght="WRTCMP_lon_upr_rght" \ - outvarname_wrtcmp_lat_upr_rght="WRTCMP_lat_upr_rght" \ - outvarname_wrtcmp_dx="WRTCMP_dx" \ - outvarname_wrtcmp_dy="WRTCMP_dy" \ - outvarname_wrtcmp_dlon="WRTCMP_dlon" \ - outvarname_wrtcmp_dlat="WRTCMP_dlat" - -fi -# -#----------------------------------------------------------------------- -# -# Make sure GRID_GEN_METHOD is set to a valid value. -# -#----------------------------------------------------------------------- -# -err_msg="\ -The horizontal grid generation method specified in GRID_GEN_METHOD is -not supported: - GRID_GEN_METHOD = \"${GRID_GEN_METHOD}\"" -check_var_valid_value \ - "GRID_GEN_METHOD" "valid_vals_GRID_GEN_METHOD" "${err_msg}" -# -#----------------------------------------------------------------------- -# -# For a "GFDLgrid" type of grid, make sure GFDLgrid_NUM_CELLS is set to -# a valid value. 
-# -#----------------------------------------------------------------------- -# -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - err_msg="\ -The number of grid cells per tile in each horizontal direction specified -in GFDLgrid_NUM_CELLS is not supported: - GFDLgrid_NUM_CELLS = \"${GFDLgrid_NUM_CELLS}\"" - check_var_valid_value "GFDLgrid_NUM_CELLS" "valid_vals_GFDLgrid_NUM_CELLS" "${err_msg}" -fi -# -#----------------------------------------------------------------------- -# -# Check to make sure that various computational parameters needed by the -# forecast model are set to non-empty values. At this point in the -# experiment generation, all of these should be set to valid (non-empty) -# values. -# -#----------------------------------------------------------------------- -# -if [ -z "${DT_ATMOS}" ]; then - print_err_msg_exit "\ -The forecast model main time step (DT_ATMOS) is set to a null string: - DT_ATMOS = ${DT_ATMOS} -Please set this to a valid numerical value in the user-specified experiment -configuration file (EXPT_CONFIG_FP) and rerun: - EXPT_CONFIG_FP = \"${EXPT_CONFIG_FP}\"" -fi - -if [ -z "${LAYOUT_X}" ]; then - print_err_msg_exit "\ -The number of MPI processes to be used in the x direction (LAYOUT_X) by -the forecast job is set to a null string: - LAYOUT_X = ${LAYOUT_X} -Please set this to a valid numerical value in the user-specified experiment -configuration file (EXPT_CONFIG_FP) and rerun: - EXPT_CONFIG_FP = \"${EXPT_CONFIG_FP}\"" -fi - -if [ -z "${LAYOUT_Y}" ]; then - print_err_msg_exit "\ -The number of MPI processes to be used in the y direction (LAYOUT_Y) by -the forecast job is set to a null string: - LAYOUT_Y = ${LAYOUT_Y} -Please set this to a valid numerical value in the user-specified experiment -configuration file (EXPT_CONFIG_FP) and rerun: - EXPT_CONFIG_FP = \"${EXPT_CONFIG_FP}\"" -fi - -if [ -z "${BLOCKSIZE}" ]; then - print_err_msg_exit "\ -The cache size to use for each MPI task of the forecast (BLOCKSIZE) is -set to a null string: 
- BLOCKSIZE = ${BLOCKSIZE} -Please set this to a valid numerical value in the user-specified experiment -configuration file (EXPT_CONFIG_FP) and rerun: - EXPT_CONFIG_FP = \"${EXPT_CONFIG_FP}\"" -fi -# -#----------------------------------------------------------------------- -# -# If performing sub-hourly model output and post-processing, check that -# the output interval DT_SUBHOURLY_POST_MNTS (in minutes) is specified -# correctly. -# -#----------------------------------------------------------------------- -# -check_var_valid_value "SUB_HOURLY_POST" "valid_vals_BOOLEAN" -SUB_HOURLY_POST=$(boolify "${SUB_HOURLY_POST}") - -if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then -# -# Check that DT_SUBHOURLY_POST_MNTS is a string consisting of one or two -# digits. -# - mnts_or_null=$( printf "%s" "${DT_SUBHOURLY_POST_MNTS}" | \ - $SED -n -r -e "s/^([0-9])([0-9])?$/\1\2/p" ) - if [ -z "${mnts_or_null}" ]; then - print_err_msg_exit "\ -When performing sub-hourly post (i.e. SUB_HOURLY_POST set to \"TRUE\"), -DT_SUBHOURLY_POST_MNTS must be set to a one- or two-digit integer but -in this case is not: - SUB_HOURLY_POST = \"${SUB_HOURLY_POST}\" - DT_SUBHOURLY_POST_MNTS = \"${DT_SUBHOURLY_POST_MNTS}\"" - fi -# -# Check that DT_SUBHOURLY_POST_MNTS is between 0 and 59, inclusive. -# - if [ ${DT_SUBHOURLY_POST_MNTS} -lt "0" ] || \ - [ ${DT_SUBHOURLY_POST_MNTS} -gt "59" ]; then - print_err_msg_exit "\ -When performing sub-hourly post (i.e. SUB_HOURLY_POST set to \"TRUE\"), -DT_SUBHOURLY_POST_MNTS must be set to an integer between 0 and 59, -inclusive but in this case is not: - SUB_HOURLY_POST = \"${SUB_HOURLY_POST}\" - DT_SUBHOURLY_POST_MNTS = \"${DT_SUBHOURLY_POST_MNTS}\"" - fi -# -# Check that DT_SUBHOURLY_POST_MNTS (after converting to seconds) is -# evenly divisible by the forecast model's main time step DT_ATMOS. -# - rem=$(( DT_SUBHOURLY_POST_MNTS*60 % DT_ATMOS )) - if [ ${rem} -ne 0 ]; then - print_err_msg_exit "\ -When performing sub-hourly post (i.e. 
SUB_HOURLY_POST set to \"TRUE\"), -the time interval specified by DT_SUBHOURLY_POST_MNTS (after converting -to seconds) must be evenly divisible by the time step DT_ATMOS used in -the forecast model, i.e. the remainder (rem) must be zero. In this case, -it is not: - SUB_HOURLY_POST = \"${SUB_HOURLY_POST}\" - DT_SUBHOURLY_POST_MNTS = \"${DT_SUBHOURLY_POST_MNTS}\" - DT_ATMOS = \"${DT_ATMOS}\" - rem = \$(( (DT_SUBHOURLY_POST_MNTS*60) %% DT_ATMOS )) = $rem -Please reset DT_SUBHOURLY_POST_MNTS and/or DT_ATMOS so that this remainder -is zero." - fi -# -# If DT_SUBHOURLY_POST_MNTS is set to 0 (with SUB_HOURLY_POST set to -# "TRUE"), then we're not really performing subhourly post-processing. -# In this case, reset SUB_HOURLY_POST to "FALSE" and print out an -# informational message that such a change was made. -# - if [ "${DT_SUBHOURLY_POST_MNTS}" -eq "0" ]; then - print_info_msg "\ -When performing sub-hourly post (i.e. SUB_HOURLY_POST set to \"TRUE\"), -DT_SUBHOURLY_POST_MNTS must be set to a value greater than 0; otherwise, -sub-hourly output is not really being performed: - SUB_HOURLY_POST = \"${SUB_HOURLY_POST}\" - DT_SUBHOURLY_POST_MNTS = \"${DT_SUBHOURLY_POST_MNTS}\" -Resetting SUB_HOURLY_POST to \"FALSE\". If you do not want this, you -must set DT_SUBHOURLY_POST_MNTS to something other than zero." - SUB_HOURLY_POST="FALSE" - fi -# -# For now, the sub-hourly capability is restricted to having values of -# DT_SUBHOURLY_POST_MNTS that evenly divide into 60 minutes. This is -# because the jinja rocoto XML template (${WFLOW_XML_FN}) assumes that -# model output is generated at the top of every hour (i.e. at 00 minutes). -# This restricts DT_SUBHOURLY_POST_MNTS to the following values (inluding -# both cases with and without a leading 0): -# -# "1" "01" "2" "02" "3" "03" "4" "04" "5" "05" "6" "06" "10" "12" "15" "20" "30" -# -# This restriction will be removed in a future version of the workflow, -# For now, check that DT_SUBHOURLY_POST_MNTS is one of the above values. 
-# - if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then - check_var_valid_value "DT_SUBHOURLY_POST_MNTS" "valid_vals_DT_SUBHOURLY_POST_MNTS" - fi - -fi -# -#----------------------------------------------------------------------- -# -# If the base directory (EXPT_BASEDIR) in which the experiment subdirectory -# (EXPT_SUBDIR) will be located does not start with a "/", then it is -# either set to a null string or contains a relative directory. In both -# cases, prepend to it the absolute path of the default directory under -# which the experiment directories are placed. If EXPT_BASEDIR was set -# to a null string, it will get reset to this default experiment directory, -# and if it was set to a relative directory, it will get reset to an -# absolute directory that points to the relative directory under the -# default experiment directory. Then create EXPT_BASEDIR if it doesn't -# already exist. -# -#----------------------------------------------------------------------- -# -if [ "${EXPT_BASEDIR:0:1}" != "/" ]; then - EXPT_BASEDIR="${SR_WX_APP_TOP_DIR}/../expt_dirs/${EXPT_BASEDIR}" -fi -EXPT_BASEDIR="$( $READLINK -m ${EXPT_BASEDIR} )" -mkdir_vrfy -p "${EXPT_BASEDIR}" -# -#----------------------------------------------------------------------- -# -# If the experiment subdirectory name (EXPT_SUBDIR) is set to an empty -# string, print out an error message and exit. -# -#----------------------------------------------------------------------- -# -if [ -z "${EXPT_SUBDIR}" ]; then - print_err_msg_exit "\ -The name of the experiment subdirectory (EXPT_SUBDIR) cannot be empty: - EXPT_SUBDIR = \"${EXPT_SUBDIR}\"" -fi -# -#----------------------------------------------------------------------- -# -# Set the full path to the experiment directory. Then check if it already -# exists and if so, deal with it as specified by PREEXISTING_DIR_METHOD. 
-# -#----------------------------------------------------------------------- -# -EXPTDIR="${EXPT_BASEDIR}/${EXPT_SUBDIR}" -check_for_preexist_dir_file "$EXPTDIR" "${PREEXISTING_DIR_METHOD}" -# -#----------------------------------------------------------------------- -# -# Set other directories, some of which may depend on EXPTDIR (depending -# on whether we're running in NCO or community mode, i.e. whether RUN_ENVIR -# is set to "nco" or "community"). Definitions: -# -# LOGDIR: -# Directory in which the log files from the workflow tasks will be placed. -# -# FIXam: -# This is the directory that will contain the fixed files or symlinks to -# the fixed files containing various fields on global grids (which are -# usually much coarser than the native FV3-LAM grid). -# -# FIXclim: -# This is the directory that will contain the MERRA2 aerosol climatology -# data file and lookup tables for optics properties -# -# FIXLAM: -# This is the directory that will contain the fixed files or symlinks to -# the fixed files containing the grid, orography, and surface climatology -# on the native FV3-LAM grid. -# -# CYCLE_BASEDIR: -# The base directory in which the directories for the various cycles will -# be placed. -# -# COMROOT: -# In NCO mode, this is the full path to the "com" directory under which -# output from the RUN_POST_TN task will be placed. Note that this output -# is not placed directly under COMROOT but several directories further -# down. More specifically, for a cycle starting at yyyymmddhh, it is at -# -# $COMROOT/$NET/$model_ver/$RUN.$yyyymmdd/$hh -# -# Below, we set COMROOT in terms of PTMP as COMROOT="$PTMP/com". COMOROOT -# is not used by the workflow in community mode. -# -# COMOUT_BASEDIR: -# In NCO mode, this is the base directory directly under which the output -# from the RUN_POST_TN task will be placed, i.e. it is the cycle-independent -# portion of the RUN_POST_TN task's output directory. 
It is given by -# -# $COMROOT/$NET/$model_ver -# -# COMOUT_BASEDIR is not used by the workflow in community mode. -# -#----------------------------------------------------------------------- -# -LOGDIR="${EXPTDIR}/log" - -FIXam="${EXPTDIR}/fix_am" -FIXclim="${EXPTDIR}/fix_clim" -FIXLAM="${EXPTDIR}/fix_lam" - -if [ "${RUN_ENVIR}" = "nco" ]; then - CYCLE_BASEDIR="${STMP}/tmpnwprd/${RUN}" - check_for_preexist_dir_file "${CYCLE_BASEDIR}" "${PREEXISTING_DIR_METHOD}" - COMROOT="${PTMP}/com" - COMOUT_BASEDIR="${COMROOT}/${NET}/${model_ver}" - check_for_preexist_dir_file "${COMOUT_BASEDIR}" "${PREEXISTING_DIR_METHOD}" -else - CYCLE_BASEDIR="$EXPTDIR" - COMROOT="" - COMOUT_BASEDIR="" -fi -# -#----------------------------------------------------------------------- -# -# -# If POST_OUTPUT_DOMAIN_NAME has not been specified by the user, set it -# to PREDEF_GRID_NAME (which won't be empty if using a predefined grid). -# Then change it to lowercase. Finally, ensure that it does not end up -# getting set to an empty string. -# -#----------------------------------------------------------------------- -# -POST_OUTPUT_DOMAIN_NAME="${POST_OUTPUT_DOMAIN_NAME:-${PREDEF_GRID_NAME}}" -POST_OUTPUT_DOMAIN_NAME=$(echo_lowercase ${POST_OUTPUT_DOMAIN_NAME}) - -if [ -z "${POST_OUTPUT_DOMAIN_NAME}" ]; then - print_err_msg_exit "\ -The domain name used in naming the run_post output files (POST_OUTPUT_DOMAIN_NAME) -has not been set: - POST_OUTPUT_DOMAIN_NAME = \"${POST_OUTPUT_DOMAIN_NAME}\" -If this experiment is not using a predefined grid (i.e. if PREDEF_GRID_NAME -is set to a null string), POST_OUTPUT_DOMAIN_NAME must be set in the SRW -App's configuration file (\"${EXPT_CONFIG_FN}\")." 
-fi -# -#----------------------------------------------------------------------- -# -# The FV3 forecast model needs the following input files in the run -# directory to start a forecast: -# -# (1) The data table file -# (2) The diagnostics table file -# (3) The field table file -# (4) The FV3 namelist file -# (5) The model configuration file -# (6) The NEMS configuration file -# (7) The CCPP physics suite definition file -# -# The workflow contains templates for the first six of these files. -# Template files are versions of these files that contain placeholder -# (i.e. dummy) values for various parameters. The experiment generation -# and/or the forecast task (i.e. J-job) scripts copy these templates to -# appropriate locations in the experiment directory (e.g. to the top of -# the experiment directory, to one of the cycle subdirectories, etc) and -# replace the placeholders with actual values to obtain the files that -# are used as inputs to the forecast model. -# -# Note that the CCPP physics suite defintion file (SDF) does not have a -# corresponding template file because it does not contain any values -# that need to be replaced according to the experiment configuration. -# This file simply needs to be copied over from its location in the -# forecast model's directory structure to the experiment directory. -# -# Below, we first set the names of the templates for the first six files -# listed above. We then set the full paths to these template files. -# Note that some of these file names depend on the physics suite while -# others do not. -# -#----------------------------------------------------------------------- -# -dot_ccpp_phys_suite_or_null=".${CCPP_PHYS_SUITE}" - -# Names of input files that the forecast model (ufs-weather-model) expects -# to read in. These should only be changed if the input file names in the -# forecast model code are changed. 
-#---------------------------------- -DATA_TABLE_FN="data_table" -DIAG_TABLE_FN="diag_table" -FIELD_TABLE_FN="field_table" -MODEL_CONFIG_FN="model_configure" -NEMS_CONFIG_FN="nems.configure" -#---------------------------------- - -DATA_TABLE_TMPL_FN="${DATA_TABLE_TMPL_FN:-${DATA_TABLE_FN}}" -DIAG_TABLE_TMPL_FN="${DIAG_TABLE_TMPL_FN:-${DIAG_TABLE_FN}}${dot_ccpp_phys_suite_or_null}" -FIELD_TABLE_TMPL_FN="${FIELD_TABLE_TMPL_FN:-${FIELD_TABLE_FN}}${dot_ccpp_phys_suite_or_null}" -MODEL_CONFIG_TMPL_FN="${MODEL_CONFIG_TMPL_FN:-${MODEL_CONFIG_FN}}" -NEMS_CONFIG_TMPL_FN="${NEMS_CONFIG_TMPL_FN:-${NEMS_CONFIG_FN}}" - -DATA_TABLE_TMPL_FP="${TEMPLATE_DIR}/${DATA_TABLE_TMPL_FN}" -DIAG_TABLE_TMPL_FP="${TEMPLATE_DIR}/${DIAG_TABLE_TMPL_FN}" -FIELD_TABLE_TMPL_FP="${TEMPLATE_DIR}/${FIELD_TABLE_TMPL_FN}" -FV3_NML_BASE_SUITE_FP="${TEMPLATE_DIR}/${FV3_NML_BASE_SUITE_FN}" -FV3_NML_YAML_CONFIG_FP="${TEMPLATE_DIR}/${FV3_NML_YAML_CONFIG_FN}" -FV3_NML_BASE_ENS_FP="${EXPTDIR}/${FV3_NML_BASE_ENS_FN}" -MODEL_CONFIG_TMPL_FP="${TEMPLATE_DIR}/${MODEL_CONFIG_TMPL_FN}" -NEMS_CONFIG_TMPL_FP="${TEMPLATE_DIR}/${NEMS_CONFIG_TMPL_FN}" -# -#----------------------------------------------------------------------- -# -# Set: -# -# 1) the variable CCPP_PHYS_SUITE_FN to the name of the CCPP physics -# suite definition file. -# 2) the variable CCPP_PHYS_SUITE_IN_CCPP_FP to the full path of this -# file in the forecast model's directory structure. -# 3) the variable CCPP_PHYS_SUITE_FP to the full path of this file in -# the experiment directory. -# -# Note that the experiment/workflow generation scripts will copy this -# file from CCPP_PHYS_SUITE_IN_CCPP_FP to CCPP_PHYS_SUITE_FP. Then, for -# each cycle, the forecast launch script will create a link in the cycle -# run directory to the copy of this file at CCPP_PHYS_SUITE_FP. 
-# -#----------------------------------------------------------------------- -# -CCPP_PHYS_SUITE_FN="suite_${CCPP_PHYS_SUITE}.xml" -CCPP_PHYS_SUITE_IN_CCPP_FP="${UFS_WTHR_MDL_DIR}/FV3/ccpp/suites/${CCPP_PHYS_SUITE_FN}" -CCPP_PHYS_SUITE_FP="${EXPTDIR}/${CCPP_PHYS_SUITE_FN}" -if [ ! -f "${CCPP_PHYS_SUITE_IN_CCPP_FP}" ]; then - print_err_msg_exit "\ -The CCPP suite definition file (CCPP_PHYS_SUITE_IN_CCPP_FP) does not exist -in the local clone of the ufs-weather-model: - CCPP_PHYS_SUITE_IN_CCPP_FP = \"${CCPP_PHYS_SUITE_IN_CCPP_FP}\"" -fi -# -#----------------------------------------------------------------------- -# -# Set: -# -# 1) the variable FIELD_DICT_FN to the name of the field dictionary -# file. -# 2) the variable FIELD_DICT_IN_UWM_FP to the full path of this -# file in the forecast model's directory structure. -# 3) the variable FIELD_DICT_FP to the full path of this file in -# the experiment directory. -# -#----------------------------------------------------------------------- -# -FIELD_DICT_FN="fd_nems.yaml" -FIELD_DICT_IN_UWM_FP="${UFS_WTHR_MDL_DIR}/tests/parm/${FIELD_DICT_FN}" -FIELD_DICT_FP="${EXPTDIR}/${FIELD_DICT_FN}" -if [ ! -f "${FIELD_DICT_IN_UWM_FP}" ]; then - print_err_msg_exit "\ -The field dictionary file (FIELD_DICT_IN_UWM_FP) does not exist -in the local clone of the ufs-weather-model: - FIELD_DICT_IN_UWM_FP = \"${FIELD_DICT_IN_UWM_FP}\"" -fi -# -#----------------------------------------------------------------------- -# -# Call the function that sets the ozone parameterization being used and -# modifies associated parameters accordingly. -# -#----------------------------------------------------------------------- -# -set_ozone_param \ - ccpp_phys_suite_fp="${CCPP_PHYS_SUITE_IN_CCPP_FP}" \ - output_varname_ozone_param="OZONE_PARAM" -# -#----------------------------------------------------------------------- -# -# Set the full paths to those forecast model input files that are cycle- -# independent, i.e. 
they don't include information about the cycle's -# starting day/time. These are: -# -# * The data table file [(1) in the list above)] -# * The field table file [(3) in the list above)] -# * The FV3 namelist file [(4) in the list above)] -# * The NEMS configuration file [(6) in the list above)] -# -# Since they are cycle-independent, the experiment/workflow generation -# scripts will place them in the main experiment directory (EXPTDIR). -# The script that runs each cycle will then create links to these files -# in the run directories of the individual cycles (which are subdirecto- -# ries under EXPTDIR). -# -# The remaining two input files to the forecast model, i.e. -# -# * The diagnostics table file [(2) in the list above)] -# * The model configuration file [(5) in the list above)] -# -# contain parameters that depend on the cycle start date. Thus, custom -# versions of these two files must be generated for each cycle and then -# placed directly in the run directories of the cycles (not EXPTDIR). -# For this reason, the full paths to their locations vary by cycle and -# cannot be set here (i.e. they can only be set in the loop over the -# cycles in the rocoto workflow XML file). -# -#----------------------------------------------------------------------- -# -DATA_TABLE_FP="${EXPTDIR}/${DATA_TABLE_FN}" -FIELD_TABLE_FP="${EXPTDIR}/${FIELD_TABLE_FN}" -FV3_NML_FN="${FV3_NML_BASE_SUITE_FN%.*}" -FV3_NML_FP="${EXPTDIR}/${FV3_NML_FN}" -NEMS_CONFIG_FP="${EXPTDIR}/${NEMS_CONFIG_FN}" -# -#----------------------------------------------------------------------- -# -# If USE_USER_STAGED_EXTRN_FILES is set to TRUE, make sure that the user- -# specified directories under which the external model files should be -# located actually exist. 
-# -#----------------------------------------------------------------------- -# -check_var_valid_value "USE_USER_STAGED_EXTRN_FILES" "valid_vals_BOOLEAN" -USE_USER_STAGED_EXTRN_FILES=$(boolify "${USE_USER_STAGED_EXTRN_FILES}") - -if [ "${USE_USER_STAGED_EXTRN_FILES}" = "TRUE" ]; then - - # Check for the base directory up to the first templated field. - if [ ! -d "$(dirname ${EXTRN_MDL_SOURCE_BASEDIR_ICS%%\$*})" ]; then - print_err_msg_exit "\ -The directory (EXTRN_MDL_SOURCE_BASEDIR_ICS) in which the user-staged -external model files for generating ICs should be located does not exist: - EXTRN_MDL_SOURCE_BASEDIR_ICS = \"${EXTRN_MDL_SOURCE_BASEDIR_ICS}\"" - fi - - if [ ! -d "$(dirname ${EXTRN_MDL_SOURCE_BASEDIR_LBCS%%\$*})" ]; then - print_err_msg_exit "\ -The directory (EXTRN_MDL_SOURCE_BASEDIR_LBCS) in which the user-staged -external model files for generating LBCs should be located does not exist: - EXTRN_MDL_SOURCE_BASEDIR_LBCS = \"${EXTRN_MDL_SOURCE_BASEDIR_LBCS}\"" - fi - -fi - -check_var_valid_value "NOMADS" "valid_vals_BOOLEAN" -NOMADS=$(boolify "${NOMADS}") -# -#----------------------------------------------------------------------- -# -# Make sure that DO_ENSEMBLE is set to a valid value. Then set the names -# of the ensemble members. These will be used to set the ensemble member -# directories. Also, set the full path to the FV3 namelist file corresponding -# to each ensemble member. -# -#----------------------------------------------------------------------- -# -check_var_valid_value "DO_ENSEMBLE" "valid_vals_BOOLEAN" -DO_ENSEMBLE=$(boolify "${DO_ENSEMBLE}") - -NDIGITS_ENSMEM_NAMES="0" -ENSMEM_NAMES=("") -FV3_NML_ENSMEM_FPS=("") -if [ "${DO_ENSEMBLE}" = "TRUE" ]; then - NDIGITS_ENSMEM_NAMES="${#NUM_ENS_MEMBERS}" -# Strip away all leading zeros in NUM_ENS_MEMBERS by converting it to a -# decimal (leading zeros will cause bash to interpret the number as an -# octal). 
Note that the variable definitions file will therefore contain -# the version of NUM_ENS_MEMBERS with any leading zeros stripped away. - NUM_ENS_MEMBERS="$((10#${NUM_ENS_MEMBERS}))" - fmt="%0${NDIGITS_ENSMEM_NAMES}d" - for (( i=0; i<${NUM_ENS_MEMBERS}; i++ )); do - ip1=$( printf "$fmt" $((i+1)) ) - ENSMEM_NAMES[$i]="mem${ip1}" - FV3_NML_ENSMEM_FPS[$i]="$EXPTDIR/${FV3_NML_FN}_${ENSMEM_NAMES[$i]}" - done -fi -# -#----------------------------------------------------------------------- -# -# Make sure that DO_ENSEMBLE is set to TRUE when running ensemble vx. -# -#----------------------------------------------------------------------- -# -if [ "${DO_ENSEMBLE}" = "FALSE" ] && [ "${RUN_TASK_VX_ENSGRID}" = "TRUE" -o \ - "${RUN_TASK_VX_ENSPOINT}" = "TRUE" ]; then - print_err_msg_exit "\ -Ensemble verification can not be run unless running in ensemble mode: - DO_ENSEMBLE = \"${DO_ENSEMBLE}\" - RUN_TASK_VX_ENSGRID = \"${RUN_TASK_VX_ENSGRID}\" - RUN_TASK_VX_ENSPOINT = \"${RUN_TASK_VX_ENSPOINT}\"" -fi -# -#----------------------------------------------------------------------- -# -# Set the full path to the forecast model executable. -# -#----------------------------------------------------------------------- -# -FV3_EXEC_FP="${EXECDIR}/${FV3_EXEC_FN}" -# -#----------------------------------------------------------------------- -# -# Set the full path to the script that can be used to (re)launch the -# workflow. Also, if USE_CRON_TO_RELAUNCH is set to TRUE, set the line -# to add to the cron table to automatically relaunch the workflow every -# CRON_RELAUNCH_INTVL_MNTS minutes. Otherwise, set the variable con- -# taining this line to a null string. 
-# -#----------------------------------------------------------------------- -# -WFLOW_LAUNCH_SCRIPT_FP="$USHDIR/${WFLOW_LAUNCH_SCRIPT_FN}" -WFLOW_LAUNCH_LOG_FP="$EXPTDIR/${WFLOW_LAUNCH_LOG_FN}" -if [ "${USE_CRON_TO_RELAUNCH}" = "TRUE" ]; then - CRONTAB_LINE="*/${CRON_RELAUNCH_INTVL_MNTS} * * * * cd $EXPTDIR && \ -./${WFLOW_LAUNCH_SCRIPT_FN} called_from_cron=\"TRUE\" >> ./${WFLOW_LAUNCH_LOG_FN} 2>&1" -else - CRONTAB_LINE="" -fi -# -#----------------------------------------------------------------------- -# -# Set the full path to the script that, for a given task, loads the -# necessary module files and runs the tasks. -# -#----------------------------------------------------------------------- -# -LOAD_MODULES_RUN_TASK_FP="$USHDIR/load_modules_run_task.sh" -# -#----------------------------------------------------------------------- -# -# Define the various work subdirectories under the main work directory. -# Each of these corresponds to a different step/substep/task in the pre- -# processing, as follows: -# -# GRID_DIR: -# Directory in which the grid files will be placed (if RUN_TASK_MAKE_GRID -# is set to "TRUE") or searched for (if RUN_TASK_MAKE_GRID is set to -# "FALSE"). -# -# OROG_DIR: -# Directory in which the orography files will be placed (if RUN_TASK_MAKE_OROG -# is set to "TRUE") or searched for (if RUN_TASK_MAKE_OROG is set to -# "FALSE"). -# -# SFC_CLIMO_DIR: -# Directory in which the surface climatology files will be placed (if -# RUN_TASK_MAKE_SFC_CLIMO is set to "TRUE") or searched for (if -# RUN_TASK_MAKE_SFC_CLIMO is set to "FALSE"). -# -#---------------------------------------------------------------------- -# -if [ "${RUN_ENVIR}" = "nco" ]; then - - nco_fix_dir="${DOMAIN_PREGEN_BASEDIR}/${PREDEF_GRID_NAME}" - if [ ! 
-d "${nco_fix_dir}" ]; then - print_err_msg_exit "\ -The directory (nco_fix_dir) that should contain the pregenerated grid, -orography, and surface climatology files does not exist: - nco_fix_dir = \"${nco_fix_dir}\"" - fi - - if [ "${RUN_TASK_MAKE_GRID}" = "TRUE" ] || \ - [ "${RUN_TASK_MAKE_GRID}" = "FALSE" -a \ - "${GRID_DIR}" != "${nco_fix_dir}" ]; then - - msg=" -When RUN_ENVIR is set to \"nco\", the workflow assumes that pregenerated -grid files already exist in the directory - - \${DOMAIN_PREGEN_BASEDIR}/\${PREDEF_GRID_NAME} - -where - - DOMAIN_PREGEN_BASEDIR = \"${DOMAIN_PREGEN_BASEDIR}\" - PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - -Thus, the MAKE_GRID_TN task must not be run (i.e. RUN_TASK_MAKE_GRID must -be set to \"FALSE\"), and the directory in which to look for the grid -files (i.e. GRID_DIR) must be set to the one above. Current values for -these quantities are: - - RUN_TASK_MAKE_GRID = \"${RUN_TASK_MAKE_GRID}\" - GRID_DIR = \"${GRID_DIR}\" - -Resetting RUN_TASK_MAKE_GRID to \"FALSE\" and GRID_DIR to the one above. -Reset values are: -" - - RUN_TASK_MAKE_GRID="FALSE" - GRID_DIR="${nco_fix_dir}" - - msg="$msg"" - RUN_TASK_MAKE_GRID = \"${RUN_TASK_MAKE_GRID}\" - GRID_DIR = \"${GRID_DIR}\" -" - - print_info_msg "$msg" - - fi - - if [ "${RUN_TASK_MAKE_OROG}" = "TRUE" ] || \ - [ "${RUN_TASK_MAKE_OROG}" = "FALSE" -a \ - "${OROG_DIR}" != "${nco_fix_dir}" ]; then - - msg=" -When RUN_ENVIR is set to \"nco\", the workflow assumes that pregenerated -orography files already exist in the directory - \${DOMAIN_PREGEN_BASEDIR}/\${PREDEF_GRID_NAME} - -where - - DOMAIN_PREGEN_BASEDIR = \"${DOMAIN_PREGEN_BASEDIR}\" - PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - -Thus, the MAKE_OROG_TN task must not be run (i.e. RUN_TASK_MAKE_OROG must -be set to \"FALSE\"), and the directory in which to look for the orography -files (i.e. OROG_DIR) must be set to the one above. 
Current values for -these quantities are: - - RUN_TASK_MAKE_OROG = \"${RUN_TASK_MAKE_OROG}\" - OROG_DIR = \"${OROG_DIR}\" - -Resetting RUN_TASK_MAKE_OROG to \"FALSE\" and OROG_DIR to the one above. -Reset values are: -" - - RUN_TASK_MAKE_OROG="FALSE" - OROG_DIR="${nco_fix_dir}" - - msg="$msg"" - RUN_TASK_MAKE_OROG = \"${RUN_TASK_MAKE_OROG}\" - OROG_DIR = \"${OROG_DIR}\" -" - - print_info_msg "$msg" - - fi - - if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "TRUE" ] || \ - [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" -a \ - "${SFC_CLIMO_DIR}" != "${nco_fix_dir}" ]; then - - msg=" -When RUN_ENVIR is set to \"nco\", the workflow assumes that pregenerated -surface climatology files already exist in the directory - - \${DOMAIN_PREGEN_BASEDIR}/\${PREDEF_GRID_NAME} - -where - - DOMAIN_PREGEN_BASEDIR = \"${DOMAIN_PREGEN_BASEDIR}\" - PREDEF_GRID_NAME = \"${PREDEF_GRID_NAME}\" - -Thus, the MAKE_SFC_CLIMO_TN task must not be run (i.e. RUN_TASK_MAKE_SFC_CLIMO -must be set to \"FALSE\"), and the directory in which to look for the -surface climatology files (i.e. SFC_CLIMO_DIR) must be set to the one -above. Current values for these quantities are: - - RUN_TASK_MAKE_SFC_CLIMO = \"${RUN_TASK_MAKE_SFC_CLIMO}\" - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" - -Resetting RUN_TASK_MAKE_SFC_CLIMO to \"FALSE\" and SFC_CLIMO_DIR to the -one above. Reset values are: -" - - RUN_TASK_MAKE_SFC_CLIMO="FALSE" - SFC_CLIMO_DIR="${nco_fix_dir}" - - msg="$msg"" - RUN_TASK_MAKE_SFC_CLIMO = \"${RUN_TASK_MAKE_SFC_CLIMO}\" - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\" -" - - print_info_msg "$msg" - - fi - - if [ "${RUN_TASK_VX_GRIDSTAT}" = "TRUE" ]; then - - msg=" -When RUN_ENVIR is set to \"nco\", it is assumed that the verification -will not be run. 
- RUN_TASK_VX_GRIDSTAT = \"${RUN_TASK_VX_GRIDSTAT}\" -Resetting RUN_TASK_VX_GRIDSTAT to \"FALSE\" -Reset value is:" - - RUN_TASK_VX_GRIDSTAT="FALSE" - - msg="$msg"" - RUN_TASK_VX_GRIDSTAT = \"${RUN_TASK_VX_GRIDSTAT}\" -" - - print_info_msg "$msg" - - fi - - if [ "${RUN_TASK_VX_POINTSTAT}" = "TRUE" ]; then - - msg=" -When RUN_ENVIR is set to \"nco\", it is assumed that the verification -will not be run. - RUN_TASK_VX_POINTSTAT = \"${RUN_TASK_VX_POINTSTAT}\" -Resetting RUN_TASK_VX_POINTSTAT to \"FALSE\" -Reset value is:" - - RUN_TASK_VX_POINTSTAT="FALSE" - - msg="$msg"" - RUN_TASK_VX_POINTSTAT = \"${RUN_TASK_VX_POINTSTAT}\" -" - - print_info_msg "$msg" - - fi - - if [ "${RUN_TASK_VX_ENSGRID}" = "TRUE" ]; then - - msg=" -When RUN_ENVIR is set to \"nco\", it is assumed that the verification -will not be run. - RUN_TASK_VX_ENSGRID = \"${RUN_TASK_VX_ENSGRID}\" -Resetting RUN_TASK_VX_ENSGRID to \"FALSE\" -Reset value is:" - - RUN_TASK_VX_ENSGRID="FALSE" - - msg="$msg"" - RUN_TASK_VX_ENSGRID = \"${RUN_TASK_VX_ENSGRID}\" -" - - print_info_msg "$msg" - - fi -# -#----------------------------------------------------------------------- -# -# Now consider community mode. -# -#----------------------------------------------------------------------- -# -else -# -# If RUN_TASK_MAKE_GRID is set to "FALSE", the workflow will look for -# the pregenerated grid files in GRID_DIR. In this case, make sure that -# GRID_DIR exists. Otherwise, set it to a predefined location under the -# experiment directory (EXPTDIR). -# - if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - if [ ! -d "${GRID_DIR}" ]; then - print_err_msg_exit "\ -The directory (GRID_DIR) that should contain the pregenerated grid files -does not exist: - GRID_DIR = \"${GRID_DIR}\"" - fi - else - GRID_DIR="$EXPTDIR/grid" - fi -# -# If RUN_TASK_MAKE_OROG is set to "FALSE", the workflow will look for -# the pregenerated orography files in OROG_DIR. In this case, make sure -# that OROG_DIR exists. 
Otherwise, set it to a predefined location under -# the experiment directory (EXPTDIR). -# - if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - if [ ! -d "${OROG_DIR}" ]; then - print_err_msg_exit "\ -The directory (OROG_DIR) that should contain the pregenerated orography -files does not exist: - OROG_DIR = \"${OROG_DIR}\"" - fi - else - OROG_DIR="$EXPTDIR/orog" - fi -# -# If RUN_TASK_MAKE_SFC_CLIMO is set to "FALSE", the workflow will look -# for the pregenerated surface climatology files in SFC_CLIMO_DIR. In -# this case, make sure that SFC_CLIMO_DIR exists. Otherwise, set it to -# a predefined location under the experiment directory (EXPTDIR). -# - if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then - if [ ! -d "${SFC_CLIMO_DIR}" ]; then - print_err_msg_exit "\ -The directory (SFC_CLIMO_DIR) that should contain the pregenerated surface -climatology files does not exist: - SFC_CLIMO_DIR = \"${SFC_CLIMO_DIR}\"" - fi - else - SFC_CLIMO_DIR="$EXPTDIR/sfc_climo" - fi - -fi -# -#----------------------------------------------------------------------- -# -# Make sure EXTRN_MDL_NAME_ICS is set to a valid value. -# -#----------------------------------------------------------------------- -# -err_msg="\ -The external model specified in EXTRN_MDL_NAME_ICS that provides initial -conditions (ICs) and surface fields to the FV3-LAM is not supported: - EXTRN_MDL_NAME_ICS = \"${EXTRN_MDL_NAME_ICS}\"" -check_var_valid_value \ - "EXTRN_MDL_NAME_ICS" "valid_vals_EXTRN_MDL_NAME_ICS" "${err_msg}" -# -#----------------------------------------------------------------------- -# -# Make sure EXTRN_MDL_NAME_LBCS is set to a valid value. 
-# -#----------------------------------------------------------------------- -# -err_msg="\ -The external model specified in EXTRN_MDL_NAME_ICS that provides lateral -boundary conditions (LBCs) to the FV3-LAM is not supported: - EXTRN_MDL_NAME_LBCS = \"${EXTRN_MDL_NAME_LBCS}\"" -check_var_valid_value \ - "EXTRN_MDL_NAME_LBCS" "valid_vals_EXTRN_MDL_NAME_LBCS" "${err_msg}" -# -#----------------------------------------------------------------------- -# -# Make sure FV3GFS_FILE_FMT_ICS is set to a valid value. -# -#----------------------------------------------------------------------- -# -if [ "${EXTRN_MDL_NAME_ICS}" = "FV3GFS" ]; then - err_msg="\ -The file format for FV3GFS external model files specified in FV3GFS_- -FILE_FMT_ICS is not supported: - FV3GFS_FILE_FMT_ICS = \"${FV3GFS_FILE_FMT_ICS}\"" - check_var_valid_value \ - "FV3GFS_FILE_FMT_ICS" "valid_vals_FV3GFS_FILE_FMT_ICS" "${err_msg}" -fi -# -#----------------------------------------------------------------------- -# -# Make sure FV3GFS_FILE_FMT_LBCS is set to a valid value. -# -#----------------------------------------------------------------------- -# -if [ "${EXTRN_MDL_NAME_LBCS}" = "FV3GFS" ]; then - err_msg="\ -The file format for FV3GFS external model files specified in FV3GFS_- -FILE_FMT_LBCS is not supported: - FV3GFS_FILE_FMT_LBCS = \"${FV3GFS_FILE_FMT_LBCS}\"" - check_var_valid_value \ - "FV3GFS_FILE_FMT_LBCS" "valid_vals_FV3GFS_FILE_FMT_LBCS" "${err_msg}" -fi -# -#----------------------------------------------------------------------- -# -# Set cycle-independent parameters associated with the external models -# from which we will obtain the ICs and LBCs. -# -#----------------------------------------------------------------------- -# -. ./set_extrn_mdl_params.sh -# -#----------------------------------------------------------------------- -# -# Set parameters according to the type of horizontal grid generation -# method specified. First consider GFDL's global-parent-grid based -# method. 
-# -#----------------------------------------------------------------------- -# -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - - set_gridparams_GFDLgrid \ - lon_of_t6_ctr="${GFDLgrid_LON_T6_CTR}" \ - lat_of_t6_ctr="${GFDLgrid_LAT_T6_CTR}" \ - res_of_t6g="${GFDLgrid_NUM_CELLS}" \ - stretch_factor="${GFDLgrid_STRETCH_FAC}" \ - refine_ratio_t6g_to_t7g="${GFDLgrid_REFINE_RATIO}" \ - istart_of_t7_on_t6g="${GFDLgrid_ISTART_OF_RGNL_DOM_ON_T6G}" \ - iend_of_t7_on_t6g="${GFDLgrid_IEND_OF_RGNL_DOM_ON_T6G}" \ - jstart_of_t7_on_t6g="${GFDLgrid_JSTART_OF_RGNL_DOM_ON_T6G}" \ - jend_of_t7_on_t6g="${GFDLgrid_JEND_OF_RGNL_DOM_ON_T6G}" \ - verbose="${VERBOSE}" \ - outvarname_lon_of_t7_ctr="LON_CTR" \ - outvarname_lat_of_t7_ctr="LAT_CTR" \ - outvarname_nx_of_t7_on_t7g="NX" \ - outvarname_ny_of_t7_on_t7g="NY" \ - outvarname_halo_width_on_t7g="NHW" \ - outvarname_stretch_factor="STRETCH_FAC" \ - outvarname_istart_of_t7_with_halo_on_t6sg="ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ - outvarname_iend_of_t7_with_halo_on_t6sg="IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ - outvarname_jstart_of_t7_with_halo_on_t6sg="JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" \ - outvarname_jend_of_t7_with_halo_on_t6sg="JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG" -# -#----------------------------------------------------------------------- -# -# Now consider Jim Purser's map projection/grid generation method. 
-# -#----------------------------------------------------------------------- -# -elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then - - set_gridparams_ESGgrid \ - lon_ctr="${ESGgrid_LON_CTR}" \ - lat_ctr="${ESGgrid_LAT_CTR}" \ - nx="${ESGgrid_NX}" \ - ny="${ESGgrid_NY}" \ - pazi="${ESGgrid_PAZI}" \ - halo_width="${ESGgrid_WIDE_HALO_WIDTH}" \ - delx="${ESGgrid_DELX}" \ - dely="${ESGgrid_DELY}" \ - outvarname_lon_ctr="LON_CTR" \ - outvarname_lat_ctr="LAT_CTR" \ - outvarname_nx="NX" \ - outvarname_ny="NY" \ - outvarname_pazi="PAZI" \ - outvarname_halo_width="NHW" \ - outvarname_stretch_factor="STRETCH_FAC" \ - outvarname_del_angle_x_sg="DEL_ANGLE_X_SG" \ - outvarname_del_angle_y_sg="DEL_ANGLE_Y_SG" \ - outvarname_neg_nx_of_dom_with_wide_halo="NEG_NX_OF_DOM_WITH_WIDE_HALO" \ - outvarname_neg_ny_of_dom_with_wide_halo="NEG_NY_OF_DOM_WITH_WIDE_HALO" - -fi -# -#----------------------------------------------------------------------- -# -# Create a new experiment directory. Note that at this point we are -# guaranteed that there is no preexisting experiment directory. For -# platforms with no workflow manager, we need to create LOGDIR as well, -# since it won't be created later at runtime. -# -#----------------------------------------------------------------------- -# -mkdir_vrfy -p "$EXPTDIR" -mkdir_vrfy -p "$LOGDIR" -# -#----------------------------------------------------------------------- -# -# If not running the MAKE_GRID_TN, MAKE_OROG_TN, and/or MAKE_SFC_CLIMO -# tasks, create symlinks under the FIXLAM directory to pregenerated grid, -# orography, and surface climatology files. In the process, also set -# RES_IN_FIXLAM_FILENAMES, which is the resolution of the grid (in units -# of number of grid points on an equivalent global uniform cubed-sphere -# grid) used in the names of the fixed files in the FIXLAM directory. 
-# -#----------------------------------------------------------------------- -# -mkdir_vrfy -p "$FIXLAM" -RES_IN_FIXLAM_FILENAMES="" -# -#----------------------------------------------------------------------- -# -# If the grid file generation task in the workflow is going to be skipped -# (because pregenerated files are available), create links in the FIXLAM -# directory to the pregenerated grid files. -# -#----------------------------------------------------------------------- -# -res_in_grid_fns="" -if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - - link_fix \ - verbose="$VERBOSE" \ - file_group="grid" \ - output_varname_res_in_filenames="res_in_grid_fns" || \ - print_err_msg_exit "\ -Call to function to create links to grid files failed." - - RES_IN_FIXLAM_FILENAMES="${res_in_grid_fns}" - -fi -# -#----------------------------------------------------------------------- -# -# If the orography file generation task in the workflow is going to be -# skipped (because pregenerated files are available), create links in -# the FIXLAM directory to the pregenerated orography files. -# -#----------------------------------------------------------------------- -# -res_in_orog_fns="" -if [ "${RUN_TASK_MAKE_OROG}" = "FALSE" ]; then - - link_fix \ - verbose="$VERBOSE" \ - file_group="orog" \ - output_varname_res_in_filenames="res_in_orog_fns" || \ - print_err_msg_exit "\ -Call to function to create links to orography files failed." - - if [ ! 
-z "${RES_IN_FIXLAM_FILENAMES}" ] && \ - [ "${res_in_orog_fns}" -ne "${RES_IN_FIXLAM_FILENAMES}" ]; then - print_err_msg_exit "\ -The resolution extracted from the orography file names (res_in_orog_fns) -does not match the resolution in other groups of files already consi- -dered (RES_IN_FIXLAM_FILENAMES): - res_in_orog_fns = ${res_in_orog_fns} - RES_IN_FIXLAM_FILENAMES = ${RES_IN_FIXLAM_FILENAMES}" - else - RES_IN_FIXLAM_FILENAMES="${res_in_orog_fns}" - fi - -fi -# -#----------------------------------------------------------------------- -# -# If the surface climatology file generation task in the workflow is -# going to be skipped (because pregenerated files are available), create -# links in the FIXLAM directory to the pregenerated surface climatology -# files. -# -#----------------------------------------------------------------------- -# -res_in_sfc_climo_fns="" -if [ "${RUN_TASK_MAKE_SFC_CLIMO}" = "FALSE" ]; then - - link_fix \ - verbose="$VERBOSE" \ - file_group="sfc_climo" \ - output_varname_res_in_filenames="res_in_sfc_climo_fns" || \ - print_err_msg_exit "\ -Call to function to create links to surface climatology files failed." - - if [ ! -z "${RES_IN_FIXLAM_FILENAMES}" ] && \ - [ "${res_in_sfc_climo_fns}" -ne "${RES_IN_FIXLAM_FILENAMES}" ]; then - print_err_msg_exit "\ -The resolution extracted from the surface climatology file names (res_- -in_sfc_climo_fns) does not match the resolution in other groups of files -already considered (RES_IN_FIXLAM_FILENAMES): - res_in_sfc_climo_fns = ${res_in_sfc_climo_fns} - RES_IN_FIXLAM_FILENAMES = ${RES_IN_FIXLAM_FILENAMES}" - else - RES_IN_FIXLAM_FILENAMES="${res_in_sfc_climo_fns}" - fi - -fi -# -#----------------------------------------------------------------------- -# -# The variable CRES is needed in constructing various file names. If -# not running the make_grid task, we can set it here. Otherwise, it -# will get set to a valid value by that task. 
-# -#----------------------------------------------------------------------- -# -CRES="" -if [ "${RUN_TASK_MAKE_GRID}" = "FALSE" ]; then - CRES="C${RES_IN_FIXLAM_FILENAMES}" -fi -# -#----------------------------------------------------------------------- -# -# Make sure that WRITE_DOPOST is set to a valid value. -# -#----------------------------------------------------------------------- -# -check_var_valid_value "WRITE_DOPOST" "valid_vals_BOOLEAN" -WRITE_DOPOST=$(boolify "${WRITE_DOPOST}") - -if [ "$WRITE_DOPOST" = "TRUE" ] ; then - -# Turn off run_post - RUN_TASK_RUN_POST="FALSE" - -# Check if SUB_HOURLY_POST is on - if [ "${SUB_HOURLY_POST}" = "TRUE" ]; then - print_err_msg_exit "\ -SUB_HOURLY_POST is NOT available with Inline Post yet." - fi -fi - -check_var_valid_value "QUILTING" "valid_vals_BOOLEAN" -QUILTING=$(boolify "$QUILTING") - -check_var_valid_value "PRINT_ESMF" "valid_vals_BOOLEAN" -PRINT_ESMF=$(boolify "${PRINT_ESMF}") - -# -#----------------------------------------------------------------------- -# -# Calculate PE_MEMBER01. This is the number of MPI tasks used for the -# forecast, including those for the write component if QUILTING is set -# to "TRUE". -# -#----------------------------------------------------------------------- -# -PE_MEMBER01=$(( LAYOUT_X*LAYOUT_Y )) -if [ "$QUILTING" = "TRUE" ]; then - PE_MEMBER01=$(( ${PE_MEMBER01} + ${WRTCMP_write_groups}*${WRTCMP_write_tasks_per_group} )) -fi - -print_info_msg "$VERBOSE" " -The number of MPI tasks for the forecast (including those for the write -component if it is being used) are: - PE_MEMBER01 = ${PE_MEMBER01}" -# -#----------------------------------------------------------------------- -# -# If the write-component is going to be used to write output files to -# disk (i.e. if QUILTING is set to "TRUE"), make sure that the grid type -# used by the write-component (WRTCMP_output_grid) is set to a valid value. 
-# -#----------------------------------------------------------------------- -# -if [ "$QUILTING" = "TRUE" ]; then - err_msg="\ -The coordinate system used by the write-component output grid specified -in WRTCMP_output_grid is not supported: - WRTCMP_output_grid = \"${WRTCMP_output_grid}\"" - check_var_valid_value \ - "WRTCMP_output_grid" "valid_vals_WRTCMP_output_grid" "${err_msg}" -fi -# -#----------------------------------------------------------------------- -# -# Calculate the number of nodes (NNODES_RUN_FCST) to request from the job -# scheduler for the forecast task (RUN_FCST_TN). This is just PE_MEMBER01 -# dividied by the number of processes per node we want to request for this -# task (PPN_RUN_FCST), then rounded up to the nearest integer, i.e. -# -# NNODES_RUN_FCST = ceil(PE_MEMBER01/PPN_RUN_FCST) -# -# where ceil(...) is the ceiling function, i.e. it rounds its floating -# point argument up to the next larger integer. Since in bash, division -# of two integers returns a truncated integer, and since bash has no -# built-in ceil(...) function, we perform the rounding-up operation by -# adding the denominator (of the argument of ceil(...) above) minus 1 to -# the original numerator, i.e. by redefining NNODES_RUN_FCST to be -# -# NNODES_RUN_FCST = (PE_MEMBER01 + PPN_RUN_FCST - 1)/PPN_RUN_FCST -# -#----------------------------------------------------------------------- -# -NNODES_RUN_FCST=$(( (PE_MEMBER01 + PPN_RUN_FCST - 1)/PPN_RUN_FCST )) -# -#----------------------------------------------------------------------- -# -# Call the function that checks whether the RUC land surface model (LSM) -# is being called by the physics suite and sets the workflow variable -# SDF_USES_RUC_LSM to "TRUE" or "FALSE" accordingly. 
-# -#----------------------------------------------------------------------- -# -check_ruc_lsm \ - ccpp_phys_suite_fp="${CCPP_PHYS_SUITE_IN_CCPP_FP}" \ - output_varname_sdf_uses_ruc_lsm="SDF_USES_RUC_LSM" -# -#----------------------------------------------------------------------- -# -# Set the name of the file containing aerosol climatology data that, if -# necessary, can be used to generate approximate versions of the aerosol -# fields needed by Thompson microphysics. This file will be used to -# generate such approximate aerosol fields in the ICs and LBCs if Thompson -# MP is included in the physics suite and if the exteranl model for ICs -# or LBCs does not already provide these fields. Also, set the full path -# to this file. -# -#----------------------------------------------------------------------- -# -THOMPSON_MP_CLIMO_FN="Thompson_MP_MONTHLY_CLIMO.nc" -THOMPSON_MP_CLIMO_FP="$FIXam/${THOMPSON_MP_CLIMO_FN}" -# -#----------------------------------------------------------------------- -# -# Call the function that, if the Thompson microphysics parameterization -# is being called by the physics suite, modifies certain workflow arrays -# to ensure that fixed files needed by this parameterization are copied -# to the FIXam directory and appropriate symlinks to them are created in -# the run directories. This function also sets the workflow variable -# SDF_USES_THOMPSON_MP that indicates whether Thompson MP is called by -# the physics suite. -# -#----------------------------------------------------------------------- -# -set_thompson_mp_fix_files \ - ccpp_phys_suite_fp="${CCPP_PHYS_SUITE_IN_CCPP_FP}" \ - thompson_mp_climo_fn="${THOMPSON_MP_CLIMO_FN}" \ - output_varname_sdf_uses_thompson_mp="SDF_USES_THOMPSON_MP" -# -#----------------------------------------------------------------------- -# -# Set the full path to the experiment's variable definitions file. 
This -# file will contain definitions of variables (in bash syntax) needed by -# the various scripts in the workflow. -# -#----------------------------------------------------------------------- -# -GLOBAL_VAR_DEFNS_FP="$EXPTDIR/${GLOBAL_VAR_DEFNS_FN}" -# -#----------------------------------------------------------------------- -# -# Get the list of constants and their values. The result is saved in -# the variable "constant_defns". This will be written to the experiment's -# variable defintions file later below. -# -#----------------------------------------------------------------------- -# -print_info_msg " -Creating list of constants..." - -get_bash_file_contents fp="$USHDIR/${CONSTANTS_FN}" \ - outvarname_contents="constant_defns" - -print_info_msg "$DEBUG" " -The variable \"constant_defns\" containing definitions of various -constants is set as follows: - -${constant_defns} -" -# -#----------------------------------------------------------------------- -# -# Get the list of primary experiment variables and their default values -# from the default experiment configuration file (EXPT_DEFAULT_CONFIG_FN). -# By "primary", we mean those variables that are defined in the default -# configuration file and can be reset in the user-specified experiment -# configuration file (EXPT_CONFIG_FN). The default values will be updated -# below to user-specified ones and the result saved in the experiment's -# variable definitions file. -# -#----------------------------------------------------------------------- -# -print_info_msg " -Creating list of default experiment variable definitions..." 
- -get_bash_file_contents fp="$USHDIR/${EXPT_DEFAULT_CONFIG_FN}" \ - outvarname_contents="default_var_defns" - -print_info_msg "$DEBUG" " -The variable \"default_var_defns\" containing default values of primary -experiment variables is set as follows: - -${default_var_defns} -" -# -#----------------------------------------------------------------------- -# -# Create a list of primary experiment variable definitions containing -# updated values. By "updated", we mean non-default values. Values -# may have been updated due to the presence of user-specified values in -# the experiment configuration file (EXPT_CONFIG_FN) or due to other -# considerations (e.g. resetting depending on the platform the App is -# running on). -# -#----------------------------------------------------------------------- -# -print_info_msg " -Creating lists of (updated) experiment variable definitions..." -# -# Set the flag that specifies whether or not array variables will be -# recorded in the variable definitions file on one line or one element -# per line. Then, if writing arrays one element per line (i.e. multiline), -# set an escaped-newline character that needs to be included after every -# element of each array as the newline character in order for sed to -# write the line properly. -# -multiline_arrays="TRUE" -#multiline_arrays="FALSE" -escbksl_nl_or_null="" -if [ "${multiline_arrays}" = "TRUE" ]; then - escbksl_nl_or_null='\\\n' -fi -# -# Loop through the lines in default_var_defns. Reset the value of the -# variable on each line to the updated value (e.g. to a user-specified -# value, as opposed to the default value). The updated list of variables -# and values will be saved in var_defns. -# -var_defns="" -while read crnt_line; do -# -# Try to obtain the name of the variable being set on the current line. 
-# This will be successful only if the line consists of one or more non- -# whitespace characters representing the name of a variable followed by -# an equal sign, followed by zero or more characters representing the -# value that the variable is being set to. (Recall that in generating -# the variable default_var_defns, leading spaces on each line were -# stripped out). -# - var_name=$( printf "%s" "${crnt_line}" | $SED -n -r -e "s/^([^ ]*)=.*/\1/p" ) -# -# If var_name is not empty, then a variable name was found on the current -# line in default_var_defns. -# - if [ ! -z ${var_name} ]; then - - print_info_msg "$DEBUG" " -var_name = \"${var_name}\"" -# -# If the variable specified in var_name is set in the current environment -# (to either an empty or non-empty string), get its value and save it in -# var_value. Note that -# -# ${!var_name+x} -# -# will retrun the string "x" if the variable specified in var_name is -# set (to either an empty or non-empty string), and it will return an -# empty string if the variable specified in var_name is unset (i.e. if -# it is undefined). -# - unset "var_value" - if [ ! -z "${!var_name+x}" ]; then -# -# The variable may be a scalar or an array. Thus, we first treat it as -# an array and obtain the number of elements that it contains. -# - array_name_at="${var_name}[@]" - array=("${!array_name_at}") - num_elems="${#array[@]}" -# -# Set var_value to the updated value of the current experiment variable. -# How this is done depends on whether the variable is a scalar or an -# array. -# -# If the variable contains only one element, then it is a scalar. (It -# could be a 1-element array, but for simplicity, we treat that case as -# a scalar.) In this case, we enclose its value in single quotes and -# save the result in var_value. No variable expansion should be -# happening from variables saved in the var_defns file. 
-# - if [ "${num_elems}" -eq 1 ]; then - - var_value="${!var_name}" - rhs="'${var_value}'" -# -# If the variable contains more than one element, then it is an array. -# In this case, we build var_value in two steps as follows: -# -# 1) Generate a string containing each element of the array in double -# quotes and followed by a space (and followed by an optional backslash -# and newline if multiline_arrays has been set to "TRUE"). -# -# 2) Place parentheses around the double-quoted list of array elements -# generated in the first step. Note that there is no need to put a -# space before the closing parenthesis because during step 1 above, -# a space has already been placed after the last array element. -# - else - - var_value="" - printf -v "var_value" "%s${escbksl_nl_or_null}" "" - for (( i=0; i<${num_elems}; i++ )); do - printf -v "var_value" "%s${escbksl_nl_or_null}" "${var_value}\"${array[$i]}\" " - done - rhs="( ${var_value})" - - fi -# -# If for some reason the variable specified in var_name is not set in -# the current environment (to either an empty or non-empty string), below -# we will still include it in the variable definitions file and simply -# set it to a null string. Thus, here, we set its value (var_value) to -# an empty string). In this case, we also issue an informational message. -# - else - - print_info_msg " -The variable specified by \"var_name\" is not set in the current environment: - var_name = \"${var_name}\" -Setting its value in the variable definitions file to an empty string." - - rhs="''" - - fi -# -# Set the line containing the variable's definition. Then add the line -# to the list of all variable definitions. -# - var_defn="${var_name}=$rhs" - printf -v "var_defns" "%s\n" "${var_defns}${var_defn}" -# -# If var_name is empty, then a variable name was not found on the current -# line in default_var_defns. In this case, print out a warning and move -# on to the next line. 
-# - else - - print_info_msg " -Could not extract a variable name from the current line in \"default_var_defns\" -(probably because it does not contain an equal sign with no spaces on -either side): - crnt_line = \"${crnt_line}\" - var_name = \"${var_name}\" -Continuing to next line in \"default_var_defns\"." - - fi - -done <<< "${default_var_defns}" -# -#----------------------------------------------------------------------- -# -# Construct the experiment's variable definitions file. Below, we first -# record the contents we want to place in this file in the variable -# var_defns_file_contents, and we then write the contents of this -# variable to the file. -# -#----------------------------------------------------------------------- -# -print_info_msg " -Generating the global experiment variable definitions file specified by -GLOBAL_VAR_DEFNS_FN: - GLOBAL_VAR_DEFNS_FN = \"${GLOBAL_VAR_DEFNS_FN}\" -Full path to this file is: - GLOBAL_VAR_DEFNS_FP = \"${GLOBAL_VAR_DEFNS_FP}\" -For more detailed information, set DEBUG to \"TRUE\" in the experiment -configuration file (\"${EXPT_CONFIG_FN}\")." - -var_defns_file_contents="\ -# -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# Section 1: -# This section contains definitions of the various constants defined in -# the file ${CONSTANTS_FN}. -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# -${constant_defns} -# -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# Section 2: -# This section contains (most of) the primary experiment variables, i.e. 
-# those variables that are defined in the default configuration file -# (${EXPT_DEFAULT_CONFIG_FN}) and that can be reset via the user-specified -# experiment configuration file (${EXPT_CONFIG_FN}). -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# -${var_defns}" -# -# Append derived/secondary variable definitions (as well as comments) to -# the contents of the variable definitions file. -# -ensmem_names_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${ENSMEM_NAMES[@]}") -ensmem_names_str=$(printf "( %s${escbksl_nl_or_null})" "${ensmem_names_str}") - -fv3_nml_ensmem_fps_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${FV3_NML_ENSMEM_FPS[@]}") -fv3_nml_ensmem_fps_str=$(printf "( %s${escbksl_nl_or_null})" "${fv3_nml_ensmem_fps_str}") - -var_defns_file_contents=${var_defns_file_contents}"\ -# -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# Section 3: -# This section defines variables that have been derived from the primary -# set of experiment variables above (we refer to these as \"derived\" or -# \"secondary\" variables). -#----------------------------------------------------------------------- -#----------------------------------------------------------------------- -# - -# -#----------------------------------------------------------------------- -# -# Full path to workflow (re)launch script, its log file, and the line -# that gets added to the cron table to launch this script if the flag -# USE_CRON_TO_RELAUNCH is set to \"TRUE\". -# -#----------------------------------------------------------------------- -# -WFLOW_LAUNCH_SCRIPT_FP='${WFLOW_LAUNCH_SCRIPT_FP}' -WFLOW_LAUNCH_LOG_FP='${WFLOW_LAUNCH_LOG_FP}' -CRONTAB_LINE='${CRONTAB_LINE}' -# -#----------------------------------------------------------------------- -# -# Directories. 
-# -#----------------------------------------------------------------------- -# -SR_WX_APP_TOP_DIR='${SR_WX_APP_TOP_DIR}' -HOMErrfs='$HOMErrfs' -USHDIR='$USHDIR' -SCRIPTSDIR='$SCRIPTSDIR' -JOBSDIR='$JOBSDIR' -SORCDIR='$SORCDIR' -SRC_DIR='${SRC_DIR}' -PARMDIR='$PARMDIR' -MODULES_DIR='${MODULES_DIR}' -EXECDIR='$EXECDIR' -FIXam='$FIXam' -FIXclim='$FIXclim' -FIXLAM='$FIXLAM' -FIXgsm='$FIXgsm' -FIXaer='$FIXaer' -FIXlut='$FIXlut' -COMROOT='$COMROOT' -COMOUT_BASEDIR='${COMOUT_BASEDIR}' -TEMPLATE_DIR='${TEMPLATE_DIR}' -VX_CONFIG_DIR='${VX_CONFIG_DIR}' -METPLUS_CONF='${METPLUS_CONF}' -MET_CONFIG='${MET_CONFIG}' -UFS_WTHR_MDL_DIR='${UFS_WTHR_MDL_DIR}' -UFS_UTILS_DIR='${UFS_UTILS_DIR}' -SFC_CLIMO_INPUT_DIR='${SFC_CLIMO_INPUT_DIR}' -TOPO_DIR='${TOPO_DIR}' -UPP_DIR='${UPP_DIR}' - -EXPTDIR='$EXPTDIR' -LOGDIR='$LOGDIR' -CYCLE_BASEDIR='${CYCLE_BASEDIR}' -GRID_DIR='${GRID_DIR}' -OROG_DIR='${OROG_DIR}' -SFC_CLIMO_DIR='${SFC_CLIMO_DIR}' - -NDIGITS_ENSMEM_NAMES='${NDIGITS_ENSMEM_NAMES}' -ENSMEM_NAMES=${ensmem_names_str} -FV3_NML_ENSMEM_FPS=${fv3_nml_ensmem_fps_str} -# -#----------------------------------------------------------------------- -# -# Files. 
-# -#----------------------------------------------------------------------- -# -GLOBAL_VAR_DEFNS_FP='${GLOBAL_VAR_DEFNS_FP}' - -DATA_TABLE_FN='${DATA_TABLE_FN}' -DIAG_TABLE_FN='${DIAG_TABLE_FN}' -FIELD_TABLE_FN='${FIELD_TABLE_FN}' -MODEL_CONFIG_FN='${MODEL_CONFIG_FN}' -NEMS_CONFIG_FN='${NEMS_CONFIG_FN}' - -DATA_TABLE_TMPL_FN='${DATA_TABLE_TMPL_FN}' -DIAG_TABLE_TMPL_FN='${DIAG_TABLE_TMPL_FN}' -FIELD_TABLE_TMPL_FN='${FIELD_TABLE_TMPL_FN}' -MODEL_CONFIG_TMPL_FN='${MODEL_CONFIG_TMPL_FN}' -NEMS_CONFIG_TMPL_FN='${NEMS_CONFIG_TMPL_FN}' - -DATA_TABLE_TMPL_FP='${DATA_TABLE_TMPL_FP}' -DIAG_TABLE_TMPL_FP='${DIAG_TABLE_TMPL_FP}' -FIELD_TABLE_TMPL_FP='${FIELD_TABLE_TMPL_FP}' -FV3_NML_BASE_SUITE_FP='${FV3_NML_BASE_SUITE_FP}' -FV3_NML_YAML_CONFIG_FP='${FV3_NML_YAML_CONFIG_FP}' -FV3_NML_BASE_ENS_FP='${FV3_NML_BASE_ENS_FP}' -MODEL_CONFIG_TMPL_FP='${MODEL_CONFIG_TMPL_FP}' -NEMS_CONFIG_TMPL_FP='${NEMS_CONFIG_TMPL_FP}' - -CCPP_PHYS_SUITE_FN='${CCPP_PHYS_SUITE_FN}' -CCPP_PHYS_SUITE_IN_CCPP_FP='${CCPP_PHYS_SUITE_IN_CCPP_FP}' -CCPP_PHYS_SUITE_FP='${CCPP_PHYS_SUITE_FP}' - -FIELD_DICT_FN='${FIELD_DICT_FN}' -FIELD_DICT_IN_UWM_FP='${FIELD_DICT_IN_UWM_FP}' -FIELD_DICT_FP='${FIELD_DICT_FP}' - -DATA_TABLE_FP='${DATA_TABLE_FP}' -FIELD_TABLE_FP='${FIELD_TABLE_FP}' -FV3_NML_FN='${FV3_NML_FN}' -FV3_NML_FP='${FV3_NML_FP}' -NEMS_CONFIG_FP='${NEMS_CONFIG_FP}' - -FV3_EXEC_FP='${FV3_EXEC_FP}' - -LOAD_MODULES_RUN_TASK_FP='${LOAD_MODULES_RUN_TASK_FP}' - -THOMPSON_MP_CLIMO_FN='${THOMPSON_MP_CLIMO_FN}' -THOMPSON_MP_CLIMO_FP='${THOMPSON_MP_CLIMO_FP}' -# -#----------------------------------------------------------------------- -# -# Flag for creating relative symlinks (as opposed to absolute ones). 
-# -#----------------------------------------------------------------------- -# -RELATIVE_LINK_FLAG='${RELATIVE_LINK_FLAG}' -# -#----------------------------------------------------------------------- -# -# Parameters that indicate whether or not various parameterizations are -# included in and called by the physics suite. -# -#----------------------------------------------------------------------- -# -SDF_USES_RUC_LSM='${SDF_USES_RUC_LSM}' -SDF_USES_THOMPSON_MP='${SDF_USES_THOMPSON_MP}' -# -#----------------------------------------------------------------------- -# -# Grid configuration parameters needed regardless of grid generation -# method used. -# -#----------------------------------------------------------------------- -# -GTYPE='$GTYPE' -TILE_RGNL='${TILE_RGNL}' - -LON_CTR='${LON_CTR}' -LAT_CTR='${LAT_CTR}' -NX='${NX}' -NY='${NY}' -NHW='${NHW}' -STRETCH_FAC='${STRETCH_FAC}' - -RES_IN_FIXLAM_FILENAMES='${RES_IN_FIXLAM_FILENAMES}' -# -# If running the make_grid task, CRES will be set to a null string during -# the grid generation step. It will later be set to an actual value after -# the make_grid task is complete. -# -CRES='$CRES' -" -# -#----------------------------------------------------------------------- -# -# Append to the variable definitions file the defintions of grid parameters -# that are specific to the grid generation method used. -# -#----------------------------------------------------------------------- -# -grid_vars_str="" -if [ "${GRID_GEN_METHOD}" = "GFDLgrid" ]; then - - grid_vars_str="\ -# -#----------------------------------------------------------------------- -# -# Grid configuration parameters for a regional grid generated from a -# global parent cubed-sphere grid. This is the method originally -# suggested by GFDL since it allows GFDL's nested grid generator to be -# used to generate a regional grid. However, for large regional domains, -# it results in grids that have an unacceptably large range of cell sizes -# (i.e. 
ratio of maximum to minimum cell size is not sufficiently close -# to 1). -# -#----------------------------------------------------------------------- -# -ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${ISTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' -IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${IEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' -JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${JSTART_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' -JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG='${JEND_OF_RGNL_DOM_WITH_WIDE_HALO_ON_T6SG}' -" - -elif [ "${GRID_GEN_METHOD}" = "ESGgrid" ]; then - - grid_vars_str="\ -# -#----------------------------------------------------------------------- -# -# Grid configuration parameters for a regional grid generated independently -# of a global parent grid. This method was developed by Jim Purser of -# EMC and results in very uniform grids (i.e. ratio of maximum to minimum -# cell size is very close to 1). -# -#----------------------------------------------------------------------- -# -DEL_ANGLE_X_SG='${DEL_ANGLE_X_SG}' -DEL_ANGLE_Y_SG='${DEL_ANGLE_Y_SG}' -NEG_NX_OF_DOM_WITH_WIDE_HALO='${NEG_NX_OF_DOM_WITH_WIDE_HALO}' -NEG_NY_OF_DOM_WITH_WIDE_HALO='${NEG_NY_OF_DOM_WITH_WIDE_HALO}' -PAZI='${PAZI}' -" - -fi -var_defns_file_contents="${var_defns_file_contents}${grid_vars_str}" -# -#----------------------------------------------------------------------- -# -# Continue appending variable definitions to the variable definitions -# file. 
-# -#----------------------------------------------------------------------- -# -lbc_spec_fcst_hrs_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${LBC_SPEC_FCST_HRS[@]}") -lbc_spec_fcst_hrs_str=$(printf "( %s${escbksl_nl_or_null})" "${lbc_spec_fcst_hrs_str}") - -all_cdates_str=$(printf "${escbksl_nl_or_null}\"%s\" " "${ALL_CDATES[@]}") -all_cdates_str=$(printf "( %s${escbksl_nl_or_null})" "${all_cdates_str}") - -var_defns_file_contents=${var_defns_file_contents}"\ -# -#----------------------------------------------------------------------- -# -# Flag in the \"${MODEL_CONFIG_FN}\" file for coupling the ocean model to -# the weather model. -# -#----------------------------------------------------------------------- -# -CPL='${CPL}' -# -#----------------------------------------------------------------------- -# -# Name of the ozone parameterization. The value this gets set to depends -# on the CCPP physics suite being used. -# -#----------------------------------------------------------------------- -# -OZONE_PARAM='${OZONE_PARAM}' -# -#----------------------------------------------------------------------- -# -# If USE_USER_STAGED_EXTRN_FILES is set to \"FALSE\", this is the system -# directory in which the workflow scripts will look for the files generated -# by the external model specified in EXTRN_MDL_NAME_ICS. These files will -# be used to generate the input initial condition and surface files for -# the FV3-LAM. -# -#----------------------------------------------------------------------- -# -EXTRN_MDL_SYSBASEDIR_ICS='${EXTRN_MDL_SYSBASEDIR_ICS}' -# -#----------------------------------------------------------------------- -# -# If USE_USER_STAGED_EXTRN_FILES is set to \"FALSE\", this is the system -# directory in which the workflow scripts will look for the files generated -# by the external model specified in EXTRN_MDL_NAME_LBCS. These files -# will be used to generate the input lateral boundary condition files for -# the FV3-LAM. 
-# -#----------------------------------------------------------------------- -# -EXTRN_MDL_SYSBASEDIR_LBCS='${EXTRN_MDL_SYSBASEDIR_LBCS}' -# -#----------------------------------------------------------------------- -# -# Shift back in time (in units of hours) of the starting time of the ex- -# ternal model specified in EXTRN_MDL_NAME_LBCS. -# -#----------------------------------------------------------------------- -# -EXTRN_MDL_LBCS_OFFSET_HRS='${EXTRN_MDL_LBCS_OFFSET_HRS}' -# -#----------------------------------------------------------------------- -# -# Boundary condition update times (in units of forecast hours). Note that -# LBC_SPEC_FCST_HRS is an array, even if it has only one element. -# -#----------------------------------------------------------------------- -# -LBC_SPEC_FCST_HRS=${lbc_spec_fcst_hrs_str} -# -#----------------------------------------------------------------------- -# -# The number of cycles for which to make forecasts and the list of -# starting dates/hours of these cycles. -# -#----------------------------------------------------------------------- -# -NUM_CYCLES='${NUM_CYCLES}' -ALL_CDATES=${all_cdates_str} -# -#----------------------------------------------------------------------- -# -# Parameters that determine whether FVCOM data will be used, and if so, -# their location. -# -# If USE_FVCOM is set to \"TRUE\", then FVCOM data (in the file FVCOM_FILE -# located in the directory FVCOM_DIR) will be used to update the surface -# boundary conditions during the initial conditions generation task -# (MAKE_ICS_TN). -# -#----------------------------------------------------------------------- -# -USE_FVCOM='${USE_FVCOM}' -FVCOM_DIR='${FVCOM_DIR}' -FVCOM_FILE='${FVCOM_FILE}' -# -#----------------------------------------------------------------------- -# -# Computational parameters. 
-# -#----------------------------------------------------------------------- -# -NCORES_PER_NODE='${NCORES_PER_NODE}' -PE_MEMBER01='${PE_MEMBER01}' -# -#----------------------------------------------------------------------- -# -# IF DO_SPP is set to "TRUE", N_VAR_SPP specifies the number of physics -# parameterizations that are perturbed with SPP. If DO_LSM_SPP is set to -# "TRUE", N_VAR_LNDP specifies the number of LSM parameters that are -# perturbed. LNDP_TYPE determines the way LSM perturbations are employed -# and FHCYC_LSM_SPP_OR_NOT sets FHCYC based on whether LSM perturbations -# are turned on or not. -# -#----------------------------------------------------------------------- -# -N_VAR_SPP='${N_VAR_SPP}' -N_VAR_LNDP='${N_VAR_LNDP}' -LNDP_TYPE='${LNDP_TYPE}' -LNDP_MODEL_TYPE='${LNDP_MODEL_TYPE}' -FHCYC_LSM_SPP_OR_NOT='${FHCYC_LSM_SPP_OR_NOT}' -" -# -# Done with constructing the contents of the variable definitions file, -# so now write the contents to file. -# -printf "%s\n" "${var_defns_file_contents}" >> ${GLOBAL_VAR_DEFNS_FP} - -print_info_msg "$VERBOSE" " -Done generating the global experiment variable definitions file." -# -#----------------------------------------------------------------------- -# -# Print message indicating successful completion of script. -# -#----------------------------------------------------------------------- -# -print_info_msg " -======================================================================== -Function ${func_name}() in \"${scrfunc_fn}\" completed successfully!!! -========================================================================" -# -#----------------------------------------------------------------------- -# -# Restore the shell options saved at the start of this script/function. 
-# -#----------------------------------------------------------------------- -# -{ restore_shell_opts; } > /dev/null 2>&1 - -} -# -#----------------------------------------------------------------------- -# -# Call the function defined above. -# -#----------------------------------------------------------------------- -# -setup - diff --git a/ush/valid_param_vals.sh b/ush/valid_param_vals.sh deleted file mode 100644 index 2755e5709f..0000000000 --- a/ush/valid_param_vals.sh +++ /dev/null @@ -1,59 +0,0 @@ -# -# Source file containing useful constants. -# -. ./constants.sh -# -# Define valid values for various global experiment/workflow variables. -# -valid_vals_RUN_ENVIR=("nco" "community") -valid_vals_MACHINE=("WCOSS_DELL_P3" "HERA" "ORION" "JET" "ODIN" "CHEYENNE" "STAMPEDE" "LINUX" "MACOS" "NOAACLOUD" "SINGULARITY" "GAEA") -valid_vals_SCHED=("slurm" "pbspro" "lsf" "lsfcray" "none") -valid_vals_FCST_MODEL=("ufs-weather-model" "fv3gfs_aqm") -valid_vals_WORKFLOW_MANAGER=("rocoto" "none") -valid_vals_PREDEF_GRID_NAME=( \ -"RRFS_CONUS_25km" \ -"RRFS_CONUS_13km" \ -"RRFS_CONUS_3km" \ -"RRFS_CONUScompact_25km" \ -"RRFS_CONUScompact_13km" \ -"RRFS_CONUScompact_3km" \ -"RRFS_SUBCONUS_3km" \ -"RRFS_AK_13km" \ -"RRFS_AK_3km" \ -"CONUS_25km_GFDLgrid" \ -"CONUS_3km_GFDLgrid" \ -"EMC_AK" \ -"EMC_HI" \ -"EMC_PR" \ -"EMC_GU" \ -"GSL_HAFSV0.A_25km" \ -"GSL_HAFSV0.A_13km" \ -"GSL_HAFSV0.A_3km" \ -"GSD_HRRR_AK_50km" \ -"RRFS_NA_13km" \ -"RRFS_NA_3km" \ -"SUBCONUS_Ind_3km" \ -"WoFS_3km" \ -) -valid_vals_CCPP_PHYS_SUITE=( \ -"FV3_GFS_2017_gfdlmp" \ -"FV3_GFS_2017_gfdlmp_regional" \ -"FV3_GFS_v15p2" \ -"FV3_GFS_v15_thompson_mynn_lam3km" \ -"FV3_GFS_v16" \ -"FV3_RRFS_v1beta" \ -"FV3_HRRR" \ -) -valid_vals_GFDLgrid_NUM_CELLS=("48" "96" "192" "384" "768" "1152" "3072") -valid_vals_EXTRN_MDL_NAME_ICS=("GSMGFS" "FV3GFS" "RAP" "HRRR" "NAM") -valid_vals_EXTRN_MDL_NAME_LBCS=("GSMGFS" "FV3GFS" "RAP" "HRRR" "NAM") -valid_vals_FV3GFS_FILE_FMT_ICS=("nemsio" "grib2" "netcdf") 
-valid_vals_FV3GFS_FILE_FMT_LBCS=("nemsio" "grib2" "netcdf") -valid_vals_GRID_GEN_METHOD=("GFDLgrid" "ESGgrid") -valid_vals_PREEXISTING_DIR_METHOD=("delete" "rename" "quit") -valid_vals_GTYPE=("regional") -valid_vals_WRTCMP_output_grid=("rotated_latlon" "lambert_conformal" "regional_latlon") -valid_vals_DOT_OR_USCORE=("." "_") -valid_vals_FVCOM_WCSTART=("warm" "WARM" "cold" "COLD") -valid_vals_COMPILER=("intel" "gnu") -valid_vals_DT_SUBHOURLY_POST_MNTS=("1" "01" "2" "02" "3" "03" "4" "04" "5" "05" "6" "06" "10" "12" "15" "20" "30") diff --git a/ush/valid_param_vals.yaml b/ush/valid_param_vals.yaml index 4c701f777c..64759b5a33 100644 --- a/ush/valid_param_vals.yaml +++ b/ush/valid_param_vals.yaml @@ -42,7 +42,7 @@ valid_vals_CCPP_PHYS_SUITE: [ "FV3_RRFS_v1beta", "FV3_HRRR" ] -valid_vals_GFDLgrid_RES: [48, 96, 192, 384, 768, 1152, 3072] +valid_vals_GFDLgrid_NUM_CELLS: [48, 96, 192, 384, 768, 1152, 3072] valid_vals_EXTRN_MDL_NAME_ICS: ["GSMGFS", "FV3GFS", "RAP", "HRRR", "NAM"] valid_vals_EXTRN_MDL_NAME_LBCS: ["GSMGFS", "FV3GFS", "RAP", "HRRR", "NAM"] valid_vals_USE_USER_STAGED_EXTRN_FILES: [True, False]